From bcddddbdb56f86a4e09ecbc86ec18354e18ba56b Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 11 Jul 2023 23:49:27 +0000 Subject: [PATCH 1/4] feat: Add routing information in Cloud Build GRPC clients PiperOrigin-RevId: 547283196 Source-Link: https://github.com/googleapis/googleapis/commit/64d54ff78cfe7cdc2f4b95717dc9afa3ef32a29a Source-Link: https://github.com/googleapis/googleapis-gen/commit/1de18221109b4a025c0b2e8c25324cb01b6df626 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWRlMTgyMjExMDliNGEwMjVjMGIyZThjMjUzMjRjYjAxYjZkZjYyNiJ9 --- owl-bot-staging/v1/.coveragerc | 13 + owl-bot-staging/v1/.flake8 | 33 + owl-bot-staging/v1/MANIFEST.in | 2 + owl-bot-staging/v1/README.rst | 49 + .../v1/docs/cloudbuild_v1/cloud_build.rst | 10 + .../v1/docs/cloudbuild_v1/services.rst | 6 + .../v1/docs/cloudbuild_v1/types.rst | 6 + owl-bot-staging/v1/docs/conf.py | 376 + owl-bot-staging/v1/docs/index.rst | 7 + .../cloud/devtools/cloudbuild/__init__.py | 151 + .../devtools/cloudbuild/gapic_version.py | 16 + .../google/cloud/devtools/cloudbuild/py.typed | 2 + .../cloud/devtools/cloudbuild_v1/__init__.py | 152 + .../cloudbuild_v1/gapic_metadata.json | 298 + .../devtools/cloudbuild_v1/gapic_version.py | 16 + .../cloud/devtools/cloudbuild_v1/py.typed | 2 + .../cloudbuild_v1/services/__init__.py | 15 + .../services/cloud_build/__init__.py | 22 + .../services/cloud_build/async_client.py | 2601 ++++ .../services/cloud_build/client.py | 2899 +++++ .../services/cloud_build/pagers.py | 381 + .../cloud_build/transports/__init__.py | 38 + .../services/cloud_build/transports/base.py | 443 + .../services/cloud_build/transports/grpc.py | 793 ++ .../cloud_build/transports/grpc_asyncio.py | 792 ++ .../services/cloud_build/transports/rest.py | 2419 ++++ .../devtools/cloudbuild_v1/types/__init__.py | 144 + .../cloudbuild_v1/types/cloudbuild.py | 3680 ++++++ owl-bot-staging/v1/mypy.ini | 3 + owl-bot-staging/v1/noxfile.py | 184 + ...nerated_cloud_build_approve_build_async.py | 56 + 
...enerated_cloud_build_approve_build_sync.py | 56 + ...enerated_cloud_build_cancel_build_async.py | 53 + ...generated_cloud_build_cancel_build_sync.py | 53 + ...enerated_cloud_build_create_build_async.py | 56 + ...generated_cloud_build_create_build_sync.py | 56 + ..._cloud_build_create_build_trigger_async.py | 56 + ...d_cloud_build_create_build_trigger_sync.py | 56 + ...ed_cloud_build_create_worker_pool_async.py | 57 + ...ted_cloud_build_create_worker_pool_sync.py | 57 + ..._cloud_build_delete_build_trigger_async.py | 51 + ...d_cloud_build_delete_build_trigger_sync.py | 51 + ...ed_cloud_build_delete_worker_pool_async.py | 56 + ...ted_cloud_build_delete_worker_pool_sync.py | 56 + ...1_generated_cloud_build_get_build_async.py | 53 + ...v1_generated_cloud_build_get_build_sync.py | 53 + ...ted_cloud_build_get_build_trigger_async.py | 53 + ...ated_cloud_build_get_build_trigger_sync.py | 53 + ...rated_cloud_build_get_worker_pool_async.py | 52 + ...erated_cloud_build_get_worker_pool_sync.py | 52 + ...d_cloud_build_list_build_triggers_async.py | 53 + ...ed_cloud_build_list_build_triggers_sync.py | 53 + ...generated_cloud_build_list_builds_async.py | 53 + ..._generated_cloud_build_list_builds_sync.py | 53 + ...ted_cloud_build_list_worker_pools_async.py | 53 + ...ated_cloud_build_list_worker_pools_sync.py | 53 + ...oud_build_receive_trigger_webhook_async.py | 51 + ...loud_build_receive_trigger_webhook_sync.py | 51 + ...generated_cloud_build_retry_build_async.py | 57 + ..._generated_cloud_build_retry_build_sync.py | 57 + ...ted_cloud_build_run_build_trigger_async.py | 57 + ...ated_cloud_build_run_build_trigger_sync.py | 57 + ..._cloud_build_update_build_trigger_async.py | 57 + ...d_cloud_build_update_build_trigger_sync.py | 57 + ...ed_cloud_build_update_worker_pool_async.py | 55 + ...ted_cloud_build_update_worker_pool_sync.py | 55 + ...etadata_google.devtools.cloudbuild.v1.json | 3027 +++++ .../scripts/fixup_cloudbuild_v1_keywords.py | 193 + owl-bot-staging/v1/setup.py | 90 
+ .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.7.txt | 9 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + owl-bot-staging/v1/tests/__init__.py | 16 + owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/cloudbuild_v1/__init__.py | 16 + .../gapic/cloudbuild_v1/test_cloud_build.py | 10282 ++++++++++++++++ owl-bot-staging/v2/.coveragerc | 13 + owl-bot-staging/v2/.flake8 | 33 + owl-bot-staging/v2/MANIFEST.in | 2 + owl-bot-staging/v2/README.rst | 49 + .../docs/cloudbuild_v2/repository_manager.rst | 10 + .../v2/docs/cloudbuild_v2/services.rst | 6 + .../v2/docs/cloudbuild_v2/types.rst | 6 + owl-bot-staging/v2/docs/conf.py | 376 + owl-bot-staging/v2/docs/index.rst | 7 + .../cloud/devtools/cloudbuild/__init__.py | 93 + .../devtools/cloudbuild/gapic_version.py | 16 + .../google/cloud/devtools/cloudbuild/py.typed | 2 + .../cloud/devtools/cloudbuild_v2/__init__.py | 94 + .../cloudbuild_v2/gapic_metadata.json | 238 + .../devtools/cloudbuild_v2/gapic_version.py | 16 + .../cloud/devtools/cloudbuild_v2/py.typed | 2 + .../cloudbuild_v2/services/__init__.py | 15 + .../services/repository_manager/__init__.py | 22 + .../repository_manager/async_client.py | 2257 ++++ .../services/repository_manager/client.py | 2445 ++++ .../services/repository_manager/pagers.py | 381 + .../repository_manager/transports/__init__.py | 38 + .../repository_manager/transports/base.py | 431 + .../repository_manager/transports/grpc.py | 743 ++ .../transports/grpc_asyncio.py | 742 ++ .../repository_manager/transports/rest.py | 2275 ++++ .../devtools/cloudbuild_v2/types/__init__.py | 88 + .../cloudbuild_v2/types/cloudbuild.py | 159 + .../cloudbuild_v2/types/repositories.py | 1104 ++ owl-bot-staging/v2/mypy.ini | 3 + owl-bot-staging/v2/noxfile.py | 184 + ...manager_batch_create_repositories_async.py | 62 + 
..._manager_batch_create_repositories_sync.py | 62 + ...ository_manager_create_connection_async.py | 57 + ...pository_manager_create_connection_sync.py | 57 + ...ository_manager_create_repository_async.py | 61 + ...pository_manager_create_repository_sync.py | 61 + ...ository_manager_delete_connection_async.py | 56 + ...pository_manager_delete_connection_sync.py | 56 + ...ository_manager_delete_repository_async.py | 56 + ...pository_manager_delete_repository_sync.py | 56 + ...repository_manager_fetch_git_refs_async.py | 52 + ..._repository_manager_fetch_git_refs_sync.py | 52 + ...nager_fetch_linkable_repositories_async.py | 53 + ...anager_fetch_linkable_repositories_sync.py | 53 + ...pository_manager_fetch_read_token_async.py | 52 + ...epository_manager_fetch_read_token_sync.py | 52 + ...ry_manager_fetch_read_write_token_async.py | 52 + ...ory_manager_fetch_read_write_token_sync.py | 52 + ...repository_manager_get_connection_async.py | 52 + ..._repository_manager_get_connection_sync.py | 52 + ...repository_manager_get_repository_async.py | 52 + ..._repository_manager_get_repository_sync.py | 52 + ...pository_manager_list_connections_async.py | 53 + ...epository_manager_list_connections_sync.py | 53 + ...ository_manager_list_repositories_async.py | 53 + ...pository_manager_list_repositories_sync.py | 53 + ...ository_manager_update_connection_async.py | 55 + ...pository_manager_update_connection_sync.py | 55 + ...etadata_google.devtools.cloudbuild.v2.json | 2309 ++++ .../scripts/fixup_cloudbuild_v2_keywords.py | 189 + owl-bot-staging/v2/setup.py | 91 + .../v2/testing/constraints-3.10.txt | 7 + .../v2/testing/constraints-3.11.txt | 7 + .../v2/testing/constraints-3.12.txt | 7 + .../v2/testing/constraints-3.7.txt | 10 + .../v2/testing/constraints-3.8.txt | 7 + .../v2/testing/constraints-3.9.txt | 7 + owl-bot-staging/v2/tests/__init__.py | 16 + owl-bot-staging/v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + 
.../unit/gapic/cloudbuild_v2/__init__.py | 16 + .../cloudbuild_v2/test_repository_manager.py | 9596 ++++++++++++++ 153 files changed, 56882 insertions(+) create mode 100644 owl-bot-staging/v1/.coveragerc create mode 100644 owl-bot-staging/v1/.flake8 create mode 100644 owl-bot-staging/v1/MANIFEST.in create mode 100644 owl-bot-staging/v1/README.rst create mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst create mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/services.rst create mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/types.rst create mode 100644 owl-bot-staging/v1/docs/conf.py create mode 100644 owl-bot-staging/v1/docs/index.rst create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py create mode 100644 
owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py create mode 100644 owl-bot-staging/v1/mypy.ini create mode 100644 owl-bot-staging/v1/noxfile.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py create mode 
100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json create mode 100644 owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py create mode 100644 owl-bot-staging/v1/setup.py create mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1/tests/__init__.py create mode 100644 
owl-bot-staging/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py create mode 100644 owl-bot-staging/v2/.coveragerc create mode 100644 owl-bot-staging/v2/.flake8 create mode 100644 owl-bot-staging/v2/MANIFEST.in create mode 100644 owl-bot-staging/v2/README.rst create mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst create mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/services.rst create mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/types.rst create mode 100644 owl-bot-staging/v2/docs/conf.py create mode 100644 owl-bot-staging/v2/docs/index.rst create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py create mode 100644 
owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py create mode 100644 owl-bot-staging/v2/mypy.ini create mode 100644 owl-bot-staging/v2/noxfile.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py create 
mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json create mode 100644 owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py create mode 100644 owl-bot-staging/v2/setup.py create mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v2/tests/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc new file mode 100644 index 00000000..a0cf72db --- /dev/null +++ b/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch 
= True + +[report] +show_missing = True +omit = + google/cloud/devtools/cloudbuild/__init__.py + google/cloud/devtools/cloudbuild/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 00000000..af14cd40 --- /dev/null +++ b/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/devtools/cloudbuild *.py +recursive-include google/cloud/devtools/cloudbuild_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst new file mode 100644 index 00000000..c788a1b3 --- /dev/null +++ b/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Devtools Cloudbuild API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Devtools Cloudbuild API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst new file mode 100644 index 00000000..be81dc5c --- /dev/null +++ b/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst @@ -0,0 +1,10 @@ +CloudBuild +---------------------------- + +.. automodule:: google.cloud.devtools.cloudbuild_v1.services.cloud_build + :members: + :inherited-members: + +.. automodule:: google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst new file mode 100644 index 00000000..c0bdc88d --- /dev/null +++ b/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Devtools Cloudbuild v1 API +==================================================== +.. toctree:: + :maxdepth: 2 + + cloud_build diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst new file mode 100644 index 00000000..0e955742 --- /dev/null +++ b/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Devtools Cloudbuild v1 API +================================================= + +.. automodule:: google.cloud.devtools.cloudbuild_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 00000000..4bd8e2dd --- /dev/null +++ b/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-build documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. 
+templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-build" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). 
+# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Devtools Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. 
+# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-build-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-build.tex", + u"google-cloud-build Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-build", + u"Google Cloud Devtools Cloudbuild Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-build", + u"google-cloud-build Documentation", + author, + "google-cloud-build", + "GAPIC library for Google Cloud Devtools Cloudbuild API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 00000000..37eed237 --- /dev/null +++ b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + cloudbuild_v1/services + cloudbuild_v1/types diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py new file mode 100644 index 00000000..06ff95e8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.devtools.cloudbuild import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.devtools.cloudbuild_v1.services.cloud_build.client import CloudBuildClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build.async_client import CloudBuildAsyncClient + +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApprovalConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApprovalResult +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApproveBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ArtifactResult +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Artifacts +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Build +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildApproval +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildOperationMetadata +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildOptions +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildStep +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildTrigger +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuiltImage +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CancelBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildTriggerRequest +from 
google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolOperationMetadata +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolOperationMetadata +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import FileHashes +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetWorkerPoolRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitHubEventsConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitSource +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Hash +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import InlineSecret +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PrivatePoolV1Config +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PubsubConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PullRequestFilter +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PushFilter +from 
google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ReceiveTriggerWebhookRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ReceiveTriggerWebhookResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RepositoryEventConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RepoSource +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Results +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RetryBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RunBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Secret +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SecretManagerSecret +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Secrets +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Source +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SourceProvenance +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSource +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSourceManifest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import TimeSpan +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolOperationMetadata +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedMavenArtifact +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedNpmPackage +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedPythonPackage +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Volume +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import WebhookConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import 
WorkerPool + +__all__ = ('CloudBuildClient', + 'CloudBuildAsyncClient', + 'ApprovalConfig', + 'ApprovalResult', + 'ApproveBuildRequest', + 'ArtifactResult', + 'Artifacts', + 'Build', + 'BuildApproval', + 'BuildOperationMetadata', + 'BuildOptions', + 'BuildStep', + 'BuildTrigger', + 'BuiltImage', + 'CancelBuildRequest', + 'CreateBuildRequest', + 'CreateBuildTriggerRequest', + 'CreateWorkerPoolOperationMetadata', + 'CreateWorkerPoolRequest', + 'DeleteBuildTriggerRequest', + 'DeleteWorkerPoolOperationMetadata', + 'DeleteWorkerPoolRequest', + 'FileHashes', + 'GetBuildRequest', + 'GetBuildTriggerRequest', + 'GetWorkerPoolRequest', + 'GitHubEventsConfig', + 'GitSource', + 'Hash', + 'InlineSecret', + 'ListBuildsRequest', + 'ListBuildsResponse', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', + 'PrivatePoolV1Config', + 'PubsubConfig', + 'PullRequestFilter', + 'PushFilter', + 'ReceiveTriggerWebhookRequest', + 'ReceiveTriggerWebhookResponse', + 'RepositoryEventConfig', + 'RepoSource', + 'Results', + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'Secret', + 'SecretManagerSecret', + 'Secrets', + 'Source', + 'SourceProvenance', + 'StorageSource', + 'StorageSourceManifest', + 'TimeSpan', + 'UpdateBuildTriggerRequest', + 'UpdateWorkerPoolOperationMetadata', + 'UpdateWorkerPoolRequest', + 'UploadedMavenArtifact', + 'UploadedNpmPackage', + 'UploadedPythonPackage', + 'Volume', + 'WebhookConfig', + 'WorkerPool', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py new file mode 100644 index 00000000..360a0d13 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed new file mode 100644 index 00000000..6070c14c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py new file mode 100644 index 00000000..9fcffdb4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_build import CloudBuildClient +from .services.cloud_build import CloudBuildAsyncClient + +from .types.cloudbuild import ApprovalConfig +from .types.cloudbuild import ApprovalResult +from .types.cloudbuild import ApproveBuildRequest +from .types.cloudbuild import ArtifactResult +from .types.cloudbuild import Artifacts +from .types.cloudbuild import Build +from .types.cloudbuild import BuildApproval +from .types.cloudbuild import BuildOperationMetadata +from .types.cloudbuild import BuildOptions +from .types.cloudbuild import BuildStep +from .types.cloudbuild import BuildTrigger +from .types.cloudbuild import BuiltImage +from .types.cloudbuild import CancelBuildRequest +from .types.cloudbuild import CreateBuildRequest +from .types.cloudbuild import CreateBuildTriggerRequest +from .types.cloudbuild import CreateWorkerPoolOperationMetadata +from .types.cloudbuild import CreateWorkerPoolRequest +from .types.cloudbuild import DeleteBuildTriggerRequest +from .types.cloudbuild import DeleteWorkerPoolOperationMetadata +from .types.cloudbuild import DeleteWorkerPoolRequest +from .types.cloudbuild import FileHashes +from .types.cloudbuild import GetBuildRequest +from .types.cloudbuild import GetBuildTriggerRequest +from .types.cloudbuild import GetWorkerPoolRequest +from .types.cloudbuild import GitHubEventsConfig +from .types.cloudbuild import GitSource +from .types.cloudbuild import Hash +from .types.cloudbuild import InlineSecret +from .types.cloudbuild import ListBuildsRequest +from .types.cloudbuild import ListBuildsResponse +from .types.cloudbuild import ListBuildTriggersRequest +from .types.cloudbuild import ListBuildTriggersResponse +from .types.cloudbuild import ListWorkerPoolsRequest +from .types.cloudbuild import ListWorkerPoolsResponse +from .types.cloudbuild import PrivatePoolV1Config +from .types.cloudbuild 
import PubsubConfig +from .types.cloudbuild import PullRequestFilter +from .types.cloudbuild import PushFilter +from .types.cloudbuild import ReceiveTriggerWebhookRequest +from .types.cloudbuild import ReceiveTriggerWebhookResponse +from .types.cloudbuild import RepositoryEventConfig +from .types.cloudbuild import RepoSource +from .types.cloudbuild import Results +from .types.cloudbuild import RetryBuildRequest +from .types.cloudbuild import RunBuildTriggerRequest +from .types.cloudbuild import Secret +from .types.cloudbuild import SecretManagerSecret +from .types.cloudbuild import Secrets +from .types.cloudbuild import Source +from .types.cloudbuild import SourceProvenance +from .types.cloudbuild import StorageSource +from .types.cloudbuild import StorageSourceManifest +from .types.cloudbuild import TimeSpan +from .types.cloudbuild import UpdateBuildTriggerRequest +from .types.cloudbuild import UpdateWorkerPoolOperationMetadata +from .types.cloudbuild import UpdateWorkerPoolRequest +from .types.cloudbuild import UploadedMavenArtifact +from .types.cloudbuild import UploadedNpmPackage +from .types.cloudbuild import UploadedPythonPackage +from .types.cloudbuild import Volume +from .types.cloudbuild import WebhookConfig +from .types.cloudbuild import WorkerPool + +__all__ = ( + 'CloudBuildAsyncClient', +'ApprovalConfig', +'ApprovalResult', +'ApproveBuildRequest', +'ArtifactResult', +'Artifacts', +'Build', +'BuildApproval', +'BuildOperationMetadata', +'BuildOptions', +'BuildStep', +'BuildTrigger', +'BuiltImage', +'CancelBuildRequest', +'CloudBuildClient', +'CreateBuildRequest', +'CreateBuildTriggerRequest', +'CreateWorkerPoolOperationMetadata', +'CreateWorkerPoolRequest', +'DeleteBuildTriggerRequest', +'DeleteWorkerPoolOperationMetadata', +'DeleteWorkerPoolRequest', +'FileHashes', +'GetBuildRequest', +'GetBuildTriggerRequest', +'GetWorkerPoolRequest', +'GitHubEventsConfig', +'GitSource', +'Hash', +'InlineSecret', +'ListBuildTriggersRequest', 
+'ListBuildTriggersResponse', +'ListBuildsRequest', +'ListBuildsResponse', +'ListWorkerPoolsRequest', +'ListWorkerPoolsResponse', +'PrivatePoolV1Config', +'PubsubConfig', +'PullRequestFilter', +'PushFilter', +'ReceiveTriggerWebhookRequest', +'ReceiveTriggerWebhookResponse', +'RepoSource', +'RepositoryEventConfig', +'Results', +'RetryBuildRequest', +'RunBuildTriggerRequest', +'Secret', +'SecretManagerSecret', +'Secrets', +'Source', +'SourceProvenance', +'StorageSource', +'StorageSourceManifest', +'TimeSpan', +'UpdateBuildTriggerRequest', +'UpdateWorkerPoolOperationMetadata', +'UpdateWorkerPoolRequest', +'UploadedMavenArtifact', +'UploadedNpmPackage', +'UploadedPythonPackage', +'Volume', +'WebhookConfig', +'WorkerPool', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json new file mode 100644 index 00000000..2648fd24 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json @@ -0,0 +1,298 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.devtools.cloudbuild_v1", + "protoPackage": "google.devtools.cloudbuild.v1", + "schema": "1.0", + "services": { + "CloudBuild": { + "clients": { + "grpc": { + "libraryClient": "CloudBuildClient", + "rpcs": { + "ApproveBuild": { + "methods": [ + "approve_build" + ] + }, + "CancelBuild": { + "methods": [ + "cancel_build" + ] + }, + "CreateBuild": { + "methods": [ + "create_build" + ] + }, + "CreateBuildTrigger": { + "methods": [ + "create_build_trigger" + ] + }, + "CreateWorkerPool": { + "methods": [ + "create_worker_pool" + ] + }, + "DeleteBuildTrigger": { + "methods": [ + "delete_build_trigger" + ] + }, + "DeleteWorkerPool": { + "methods": [ + "delete_worker_pool" + ] + }, + "GetBuild": { + "methods": [ + "get_build" + ] + }, + "GetBuildTrigger": { + "methods": [ + 
"get_build_trigger" + ] + }, + "GetWorkerPool": { + "methods": [ + "get_worker_pool" + ] + }, + "ListBuildTriggers": { + "methods": [ + "list_build_triggers" + ] + }, + "ListBuilds": { + "methods": [ + "list_builds" + ] + }, + "ListWorkerPools": { + "methods": [ + "list_worker_pools" + ] + }, + "ReceiveTriggerWebhook": { + "methods": [ + "receive_trigger_webhook" + ] + }, + "RetryBuild": { + "methods": [ + "retry_build" + ] + }, + "RunBuildTrigger": { + "methods": [ + "run_build_trigger" + ] + }, + "UpdateBuildTrigger": { + "methods": [ + "update_build_trigger" + ] + }, + "UpdateWorkerPool": { + "methods": [ + "update_worker_pool" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudBuildAsyncClient", + "rpcs": { + "ApproveBuild": { + "methods": [ + "approve_build" + ] + }, + "CancelBuild": { + "methods": [ + "cancel_build" + ] + }, + "CreateBuild": { + "methods": [ + "create_build" + ] + }, + "CreateBuildTrigger": { + "methods": [ + "create_build_trigger" + ] + }, + "CreateWorkerPool": { + "methods": [ + "create_worker_pool" + ] + }, + "DeleteBuildTrigger": { + "methods": [ + "delete_build_trigger" + ] + }, + "DeleteWorkerPool": { + "methods": [ + "delete_worker_pool" + ] + }, + "GetBuild": { + "methods": [ + "get_build" + ] + }, + "GetBuildTrigger": { + "methods": [ + "get_build_trigger" + ] + }, + "GetWorkerPool": { + "methods": [ + "get_worker_pool" + ] + }, + "ListBuildTriggers": { + "methods": [ + "list_build_triggers" + ] + }, + "ListBuilds": { + "methods": [ + "list_builds" + ] + }, + "ListWorkerPools": { + "methods": [ + "list_worker_pools" + ] + }, + "ReceiveTriggerWebhook": { + "methods": [ + "receive_trigger_webhook" + ] + }, + "RetryBuild": { + "methods": [ + "retry_build" + ] + }, + "RunBuildTrigger": { + "methods": [ + "run_build_trigger" + ] + }, + "UpdateBuildTrigger": { + "methods": [ + "update_build_trigger" + ] + }, + "UpdateWorkerPool": { + "methods": [ + "update_worker_pool" + ] + } + } + }, + "rest": { + "libraryClient": 
"CloudBuildClient", + "rpcs": { + "ApproveBuild": { + "methods": [ + "approve_build" + ] + }, + "CancelBuild": { + "methods": [ + "cancel_build" + ] + }, + "CreateBuild": { + "methods": [ + "create_build" + ] + }, + "CreateBuildTrigger": { + "methods": [ + "create_build_trigger" + ] + }, + "CreateWorkerPool": { + "methods": [ + "create_worker_pool" + ] + }, + "DeleteBuildTrigger": { + "methods": [ + "delete_build_trigger" + ] + }, + "DeleteWorkerPool": { + "methods": [ + "delete_worker_pool" + ] + }, + "GetBuild": { + "methods": [ + "get_build" + ] + }, + "GetBuildTrigger": { + "methods": [ + "get_build_trigger" + ] + }, + "GetWorkerPool": { + "methods": [ + "get_worker_pool" + ] + }, + "ListBuildTriggers": { + "methods": [ + "list_build_triggers" + ] + }, + "ListBuilds": { + "methods": [ + "list_builds" + ] + }, + "ListWorkerPools": { + "methods": [ + "list_worker_pools" + ] + }, + "ReceiveTriggerWebhook": { + "methods": [ + "receive_trigger_webhook" + ] + }, + "RetryBuild": { + "methods": [ + "retry_build" + ] + }, + "RunBuildTrigger": { + "methods": [ + "run_build_trigger" + ] + }, + "UpdateBuildTrigger": { + "methods": [ + "update_build_trigger" + ] + }, + "UpdateWorkerPool": { + "methods": [ + "update_worker_pool" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py new file mode 100644 index 00000000..360a0d13 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed new file mode 100644 index 00000000..6070c14c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py new file mode 100644 index 00000000..89a37dc9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py new file mode 100644 index 00000000..b796e7cd --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CloudBuildClient +from .async_client import CloudBuildAsyncClient + +__all__ = ( + 'CloudBuildClient', + 'CloudBuildAsyncClient', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py new file mode 100644 index 00000000..00eb8e04 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -0,0 +1,2601 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport +from .client import CloudBuildClient + + +class CloudBuildAsyncClient: + """Creates and manages builds on Google Cloud Platform. 
+ + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + """ + + _client: CloudBuildClient + + DEFAULT_ENDPOINT = CloudBuildClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudBuildClient.DEFAULT_MTLS_ENDPOINT + + build_path = staticmethod(CloudBuildClient.build_path) + parse_build_path = staticmethod(CloudBuildClient.parse_build_path) + build_trigger_path = staticmethod(CloudBuildClient.build_trigger_path) + parse_build_trigger_path = staticmethod(CloudBuildClient.parse_build_trigger_path) + crypto_key_path = staticmethod(CloudBuildClient.crypto_key_path) + parse_crypto_key_path = staticmethod(CloudBuildClient.parse_crypto_key_path) + network_path = staticmethod(CloudBuildClient.network_path) + parse_network_path = staticmethod(CloudBuildClient.parse_network_path) + repository_path = staticmethod(CloudBuildClient.repository_path) + parse_repository_path = staticmethod(CloudBuildClient.parse_repository_path) + secret_version_path = staticmethod(CloudBuildClient.secret_version_path) + parse_secret_version_path = staticmethod(CloudBuildClient.parse_secret_version_path) + service_account_path = staticmethod(CloudBuildClient.service_account_path) + parse_service_account_path = staticmethod(CloudBuildClient.parse_service_account_path) + subscription_path = staticmethod(CloudBuildClient.subscription_path) + parse_subscription_path = staticmethod(CloudBuildClient.parse_subscription_path) + topic_path = staticmethod(CloudBuildClient.topic_path) + parse_topic_path = staticmethod(CloudBuildClient.parse_topic_path) + worker_pool_path = staticmethod(CloudBuildClient.worker_pool_path) + parse_worker_pool_path = staticmethod(CloudBuildClient.parse_worker_pool_path) + common_billing_account_path = 
staticmethod(CloudBuildClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CloudBuildClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CloudBuildClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudBuildClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudBuildClient.common_organization_path) + parse_common_organization_path = staticmethod(CloudBuildClient.parse_common_organization_path) + common_project_path = staticmethod(CloudBuildClient.common_project_path) + parse_common_project_path = staticmethod(CloudBuildClient.parse_common_project_path) + common_location_path = staticmethod(CloudBuildClient.common_location_path) + parse_common_location_path = staticmethod(CloudBuildClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudBuildAsyncClient: The constructed client. + """ + return CloudBuildClient.from_service_account_info.__func__(CloudBuildAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudBuildAsyncClient: The constructed client. 
+ """ + return CloudBuildClient.from_service_account_file.__func__(CloudBuildAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudBuildClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CloudBuildTransport: + """Returns the transport used by the client instance. + + Returns: + CloudBuildTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(CloudBuildClient).get_transport_class, type(CloudBuildClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudBuildTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud build client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudBuildTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudBuildClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_build(self, + request: Optional[Union[cloudbuild.CreateBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + build: Optional[cloudbuild.Build] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a build with the specified configuration. + + This method returns a long-running ``Operation``, which includes + the build ID. Pass the build ID to ``GetBuild`` to determine the + build status (such as ``SUCCESS`` or ``FAILURE``). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_create_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateBuildRequest( + project_id="project_id_value", + ) + + # Make the request + operation = client.create_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest, dict]]): + The request object. Request to create a new build. 
+ project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + build (:class:`google.cloud.devtools.cloudbuild_v1.types.Build`): + Required. Build resource to create. + This corresponds to the ``build`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, build]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.CreateBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if build is not None: + request.build = build + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_build, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_build(self, + request: Optional[Union[cloudbuild.GetBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as + ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing + information. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_get_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = await client.get_build(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest, dict]]): + The request object. Request to get a build. + project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (:class:`str`): + Required. ID of the build. + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.Build: + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. 
+ + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.GetBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_build, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("id", request.id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_builds(self, + request: Optional[Union[cloudbuild.ListBuildsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildsAsyncPager: + r"""Lists previously requested builds. + Previously requested builds may still be in-progress, or + may have finished successfully or unsuccessfully. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_list_builds(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildsRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_builds(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest, dict]]): + The request object. Request to list builds. + project_id (:class:`str`): + Required. ID of the project. 
+ This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The raw filter text to constrain the + results. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsAsyncPager: + Response including listed builds. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, filter]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.ListBuildsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_builds, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBuildsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_build(self, + request: Optional[Union[cloudbuild.CancelBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Cancels a build in progress. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_cancel_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CancelBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = await client.cancel_build(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest, dict]]): + The request object. Request to cancel an ongoing build. + project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (:class:`str`): + Required. ID of the build. + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.Build: + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. 
+ - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.CancelBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_build, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("id", request.id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def retry_build(self, + request: Optional[Union[cloudbuild.RetryBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new build based on the specified build. + + This method creates a new build using the original build + request, which may or may not result in an identical build. + + For triggered builds: + + - Triggered builds resolve to a precise revision; therefore a + retry of a triggered build will result in a build that uses + the same revision. + + For non-triggered builds that specify ``RepoSource``: + + - If the original build built from the tip of a branch, the + retried build will build from the tip of that branch, which + may not be the same revision as the original build. + - If the original build specified a commit sha or revision ID, + the retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Cloud Storage + without specifying the generation of the object, the new + build will use the current object, which may be different + from the original build source. + - If the original build pulled source from Cloud Storage and + specified the generation of the object, the new build will + attempt to use the same object, which may or may not be + available depending on the bucket's lifecycle management + settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_retry_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RetryBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + operation = client.retry_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest, dict]]): + The request object. Specifies a build to retry. + project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (:class:`str`): + Required. Build ID of the original + build. + + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. 
+ + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.RetryBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.retry_build, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("id", request.id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + async def approve_build(self, + request: Optional[Union[cloudbuild.ApproveBuildRequest, dict]] = None, + *, + name: Optional[str] = None, + approval_result: Optional[cloudbuild.ApprovalResult] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Approves or rejects a pending build. + If approved, the returned LRO will be analogous to the + LRO returned from a CreateBuild call. + + If rejected, the returned LRO will be immediately done. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_approve_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ApproveBuildRequest( + name="name_value", + ) + + # Make the request + operation = client.approve_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest, dict]]): + The request object. Request to approve or reject a + pending build. + name (:class:`str`): + Required. Name of the target build. 
For example: + "projects/{$project_id}/builds/{$build_id}" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + approval_result (:class:`google.cloud.devtools.cloudbuild_v1.types.ApprovalResult`): + Approval decision and metadata. + This corresponds to the ``approval_result`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, approval_result]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.ApproveBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if approval_result is not None: + request.approval_result = approval_result + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.approve_build, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + async def create_build_trigger(self, + request: Optional[Union[cloudbuild.CreateBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger: Optional[cloudbuild.BuildTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Creates a new ``BuildTrigger``. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_create_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.CreateBuildTriggerRequest( + project_id="project_id_value", + trigger=trigger, + ) + + # Make the request + response = await client.create_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest, dict]]): + The request object. Request to create a new ``BuildTrigger``. + project_id (:class:`str`): + Required. ID of the project for which + to configure automatic builds. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (:class:`google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`): + Required. ``BuildTrigger`` to create. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.CreateBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger is not None: + request.trigger = trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_build_trigger, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_build_trigger(self, + request: Optional[Union[cloudbuild.GetBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Returns information about a ``BuildTrigger``. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_get_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + response = await client.get_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest, dict]]): + The request object. Returns the ``BuildTrigger`` with the specified ID. + project_id (:class:`str`): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (:class:`str`): + Required. Identifier (``id`` or ``name``) of the + ``BuildTrigger`` to get. + + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.GetBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_build_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger_id", request.trigger_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_build_triggers(self, + request: Optional[Union[cloudbuild.ListBuildTriggersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildTriggersAsyncPager: + r"""Lists existing ``BuildTrigger``\ s. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_list_build_triggers(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildTriggersRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_build_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest, dict]]): + The request object. Request to list existing ``BuildTriggers``. + project_id (:class:`str`): + Required. ID of the project for which + to list BuildTriggers. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersAsyncPager: + Response containing existing BuildTriggers. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.ListBuildTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_build_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBuildTriggersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_build_trigger(self, + request: Optional[Union[cloudbuild.DeleteBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_delete_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + await client.delete_build_trigger(request=request) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest, dict]]): + The request object. Request to delete a ``BuildTrigger``. + project_id (:class:`str`): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (:class:`str`): + Required. ID of the ``BuildTrigger`` to delete. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.DeleteBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_build_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger_id", request.trigger_id), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_build_trigger(self, + request: Optional[Union[cloudbuild.UpdateBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + trigger: Optional[cloudbuild.BuildTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_update_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.UpdateBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=trigger, + ) + + # Make the request + response = await client.update_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest, dict]]): + The request object. Request to update an existing ``BuildTrigger``. + project_id (:class:`str`): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (:class:`str`): + Required. ID of the ``BuildTrigger`` to update. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (:class:`google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`): + Required. ``BuildTrigger`` to update. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id, trigger]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.UpdateBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if trigger is not None: + request.trigger = trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_build_trigger, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger_id", request.trigger_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def run_build_trigger(self, + request: Optional[Union[cloudbuild.RunBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + source: Optional[cloudbuild.RepoSource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Runs a ``BuildTrigger`` at a particular source revision. + + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_run_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RunBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + operation = client.run_build_trigger(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest, dict]]): + The request object. Specifies a build trigger to run and + the source to use. 
+ project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (:class:`str`): + Required. ID of the trigger. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + source (:class:`google.cloud.devtools.cloudbuild_v1.types.RepoSource`): + Source to build against this trigger. + Branch and tag names cannot consist of + regular expressions. + + This corresponds to the ``source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. 
+ - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id, source]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.RunBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if source is not None: + request.source = source + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_build_trigger, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger_id", request.trigger_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def receive_trigger_webhook(self, + request: Optional[Union[cloudbuild.ReceiveTriggerWebhookRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ReceiveTriggerWebhookResponse: + r"""ReceiveTriggerWebhook [Experimental] is called when the API + receives a webhook request targeted at a specific trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_receive_trigger_webhook(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ReceiveTriggerWebhookRequest( + ) + + # Make the request + response = await client.receive_trigger_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest, dict]]): + The request object. ReceiveTriggerWebhookRequest [Experimental] is the + request object accepted by the ReceiveTriggerWebhook + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse: + ReceiveTriggerWebhookResponse [Experimental] is the response object for the + ReceiveTriggerWebhook method. + + """ + # Create or coerce a protobuf request object. + request = cloudbuild.ReceiveTriggerWebhookRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.receive_trigger_webhook, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger", request.trigger), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_worker_pool(self, + request: Optional[Union[cloudbuild.CreateWorkerPoolRequest, dict]] = None, + *, + parent: Optional[str] = None, + worker_pool: Optional[cloudbuild.WorkerPool] = None, + worker_pool_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_create_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateWorkerPoolRequest( + parent="parent_value", + worker_pool_id="worker_pool_id_value", + ) + + # Make the request + operation = client.create_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest, dict]]): + The request object. Request to create a new ``WorkerPool``. + parent (:class:`str`): + Required. The parent resource where this worker pool + will be created. Format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + worker_pool (:class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool`): + Required. ``WorkerPool`` resource to create. + This corresponds to the ``worker_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + worker_pool_id (:class:`str`): + Required. Immutable. The ID to use for the + ``WorkerPool``, which will become the final component of + the resource name. + + This value should be 1-63 characters, and valid + characters are /[a-z][0-9]-/. + + This corresponds to the ``worker_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, worker_pool, worker_pool_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.CreateWorkerPoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if worker_pool is not None: + request.worker_pool = worker_pool + if worker_pool_id is not None: + request.worker_pool_id = worker_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_worker_pool, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.WorkerPool, + metadata_type=cloudbuild.CreateWorkerPoolOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_worker_pool(self, + request: Optional[Union[cloudbuild.GetWorkerPoolRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: + r"""Returns details of a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_get_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetWorkerPoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_worker_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest, dict]]): + The request object. Request to get a ``WorkerPool`` with the specified name. + name (:class:`str`): + Required. The name of the ``WorkerPool`` to retrieve. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.WorkerPool: + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. 
For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.GetWorkerPoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_worker_pool, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_worker_pool(self, + request: Optional[Union[cloudbuild.DeleteWorkerPoolRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a ``WorkerPool``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_delete_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteWorkerPoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest, dict]]): + The request object. Request to delete a ``WorkerPool``. + name (:class:`str`): + Required. The name of the ``WorkerPool`` to delete. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.DeleteWorkerPoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_worker_pool, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.DeleteWorkerPoolOperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_worker_pool(self, + request: Optional[Union[cloudbuild.UpdateWorkerPoolRequest, dict]] = None, + *, + worker_pool: Optional[cloudbuild.WorkerPool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_update_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.UpdateWorkerPoolRequest( + ) + + # Make the request + operation = client.update_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest, dict]]): + The request object. Request to update a ``WorkerPool``. + worker_pool (:class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool`): + Required. The ``WorkerPool`` to update. + + The ``name`` field is used to identify the + ``WorkerPool`` to update. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``worker_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask specifying which fields in ``worker_pool`` to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([worker_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.UpdateWorkerPoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if worker_pool is not None: + request.worker_pool = worker_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_worker_pool, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("worker_pool.name", request.worker_pool.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.WorkerPool, + metadata_type=cloudbuild.UpdateWorkerPoolOperationMetadata, + ) + + # Done; return the response. + return response + + async def list_worker_pools(self, + request: Optional[Union[cloudbuild.ListWorkerPoolsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkerPoolsAsyncPager: + r"""Lists ``WorkerPool``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_list_worker_pools(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListWorkerPoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_worker_pools(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest, dict]]): + The request object. Request to list ``WorkerPool``\ s. + parent (:class:`str`): + Required. The parent of the collection of + ``WorkerPools``. Format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsAsyncPager: + Response containing existing WorkerPools. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.ListWorkerPoolsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_worker_pools, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkerPoolsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "CloudBuildAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CloudBuildAsyncClient", +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py new file mode 100644 index 00000000..23c5a8ec --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -0,0 +1,2899 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudBuildGrpcTransport +from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport +from .transports.rest import CloudBuildRestTransport + + +class CloudBuildClientMeta(type): + """Metaclass for the CloudBuild client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] + _transport_registry["grpc"] = CloudBuildGrpcTransport + _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport + _transport_registry["rest"] = CloudBuildRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[CloudBuildTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudBuildClient(metaclass=CloudBuildClientMeta): + """Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+        )
+
+        # NOTE(review): `m` is not checked for None. Every component of the
+        # pattern except the first is optional, so any non-empty endpoint is
+        # assumed to match; the empty/None case was returned early above.
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        # Already an mTLS endpoint, or not a *.googleapis.com host: leave as-is.
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "cloudbuild.googleapis.com"
+    # Computed once at class-definition time from the plain endpoint;
+    # `.__func__` unwraps the staticmethod so it is callable in the class body.
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudBuildClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        # Any caller-supplied `credentials` kwarg is intentionally overridden.
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudBuildClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    # Backwards-compatible alias for the JSON-file spelling.
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> CloudBuildTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CloudBuildTransport: The transport used by the client
+            instance.
+ """ + return self._transport + + @staticmethod + def build_path(project: str,build: str,) -> str: + """Returns a fully-qualified build string.""" + return "projects/{project}/builds/{build}".format(project=project, build=build, ) + + @staticmethod + def parse_build_path(path: str) -> Dict[str,str]: + """Parses a build path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/builds/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def build_trigger_path(project: str,location: str,trigger: str,) -> str: + """Returns a fully-qualified build_trigger string.""" + return "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + + @staticmethod + def parse_build_trigger_path(path: str) -> Dict[str,str]: + """Parses a build_trigger path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/triggers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_path(project: str,location: str,keyring: str,key: str,) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format(project=project, location=location, keyring=keyring, key=key, ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str,str]: + """Parses a crypto_key path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def network_path(project: str,network: str,) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str,str]: + """Parses a network path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + 
return m.groupdict() if m else {} + + @staticmethod + def repository_path(project: str,location: str,connection: str,repository: str,) -> str: + """Returns a fully-qualified repository string.""" + return "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) + + @staticmethod + def parse_repository_path(path: str) -> Dict[str,str]: + """Parses a repository path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)/repositories/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def secret_version_path(project: str,secret: str,version: str,) -> str: + """Returns a fully-qualified secret_version string.""" + return "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) + + @staticmethod + def parse_secret_version_path(path: str) -> Dict[str,str]: + """Parses a secret_version path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_account_path(project: str,service_account: str,) -> str: + """Returns a fully-qualified service_account string.""" + return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + + @staticmethod + def parse_service_account_path(path: str) -> Dict[str,str]: + """Parses a service_account path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def subscription_path(project: str,subscription: str,) -> str: + """Returns a fully-qualified subscription string.""" + return "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) + + @staticmethod + def 
parse_subscription_path(path: str) -> Dict[str,str]: + """Parses a subscription path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def topic_path(project: str,topic: str,) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str,str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def worker_pool_path(project: str,location: str,worker_pool: str,) -> str: + """Returns a fully-qualified worker_pool string.""" + return "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(project=project, location=location, worker_pool=worker_pool, ) + + @staticmethod + def parse_worker_pool_path(path: str) -> Dict[str,str]: + """Parses a worker_pool path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/workerPools/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = 
re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, CloudBuildTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the cloud build client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, CloudBuildTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a plain dict, fill in defaults when
        # absent, and narrow the type for the checks below.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        # Resolve endpoint and mTLS cert source from options + environment.
        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        # An API key and explicit credentials are two competing auth sources;
        # refuse ambiguous configuration up front.
        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, CloudBuildTransport):
            # transport is a CloudBuildTransport instance.
            # A pre-built transport already carries its own credentials/scopes,
            # so conflicting options here would silently be ignored — reject them.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            # Convert an API key into credentials when the installed
            # google-auth version supports it.
            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def create_build(self,
            request: Optional[Union[cloudbuild.CreateBuildRequest, dict]] = None,
            *,
            project_id: Optional[str] = None,
            build: Optional[cloudbuild.Build] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation.Operation:
        r"""Starts a build with the specified configuration.

        This method returns a long-running ``Operation``, which includes
        the build ID. Pass the build ID to ``GetBuild`` to determine the
        build status (such as ``SUCCESS`` or ``FAILURE``).

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud.devtools import cloudbuild_v1

            def sample_create_build():
                # Create a client
                client = cloudbuild_v1.CloudBuildClient()

                # Initialize request argument(s)
                request = cloudbuild_v1.CreateBuildRequest(
                    project_id="project_id_value",
                )

                # Make the request
                operation = client.create_build(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest, dict]):
                The request object. Request to create a new build.
            project_id (str):
                Required. ID of the project.
                This corresponds to the ``project_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            build (google.cloud.devtools.cloudbuild_v1.types.Build):
                Required. Build resource to create.
                This corresponds to the ``build`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.devtools.cloudbuild_v1.types.Build`
                A build resource in the Cloud Build API.

                At a high level, a Build describes where to find
                source code, how to build it (for example, the
                builder image to run on the source), and where to
                store the built artifacts.

                Fields can include the following variables, which
                will be expanded when the build is created:

                - $PROJECT_ID: the project ID of the build.
                - $PROJECT_NUMBER: the project number of the build.
                - $LOCATION: the location/region of the build.
                - $BUILD_ID: the autogenerated ID of the build.
                - $REPO_NAME: the source repository name specified
                  by RepoSource.
                - $BRANCH_NAME: the branch name specified by
                  RepoSource.
                - $TAG_NAME: the tag name specified by RepoSource.
                - $REVISION_ID or $COMMIT_SHA: the commit SHA
                  specified by RepoSource or resolved from the
                  specified branch or tag.
                - $SHORT_SHA: first 7 characters of $REVISION_ID or
                  $COMMIT_SHA.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project_id, build])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a cloudbuild.CreateBuildRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, cloudbuild.CreateBuildRequest):
            request = cloudbuild.CreateBuildRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project_id is not None:
                request.project_id = project_id
            if build is not None:
                request.build = build

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_build] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + def get_build(self, + request: Optional[Union[cloudbuild.GetBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as + ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing + information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_get_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = client.get_build(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest, dict]): + The request object. Request to get a build. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (str): + Required. ID of the build. + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.Build: + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. 
+ - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetBuildRequest): + request = cloudbuild.GetBuildRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_build] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_builds(self, + request: Optional[Union[cloudbuild.ListBuildsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildsPager: + r"""Lists previously requested builds. + Previously requested builds may still be in-progress, or + may have finished successfully or unsuccessfully. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_list_builds(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildsRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_builds(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest, dict]): + The request object. Request to list builds. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + The raw filter text to constrain the + results. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsPager: + Response including listed builds. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, filter]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListBuildsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListBuildsRequest): + request = cloudbuild.ListBuildsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_builds] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBuildsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_build(self, + request: Optional[Union[cloudbuild.CancelBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Cancels a build in progress. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_cancel_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CancelBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = client.cancel_build(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest, dict]): + The request object. Request to cancel an ongoing build. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (str): + Required. ID of the build. 
+ This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.Build: + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CancelBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudbuild.CancelBuildRequest): + request = cloudbuild.CancelBuildRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_build] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def retry_build(self, + request: Optional[Union[cloudbuild.RetryBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new build based on the specified build. + + This method creates a new build using the original build + request, which may or may not result in an identical build. + + For triggered builds: + + - Triggered builds resolve to a precise revision; therefore a + retry of a triggered build will result in a build that uses + the same revision. + + For non-triggered builds that specify ``RepoSource``: + + - If the original build built from the tip of a branch, the + retried build will build from the tip of that branch, which + may not be the same revision as the original build. 
+ - If the original build specified a commit sha or revision ID, + the retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Cloud Storage + without specifying the generation of the object, the new + build will use the current object, which may be different + from the original build source. + - If the original build pulled source from Cloud Storage and + specified the generation of the object, the new build will + attempt to use the same object, which may or may not be + available depending on the bucket's lifecycle management + settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_retry_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RetryBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + operation = client.retry_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest, dict]): + The request object. Specifies a build to retry. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (str): + Required. Build ID of the original + build. 
+ + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.RetryBuildRequest. 
+ # There's no risk of modifying the input as we've already verified
+ there are no flattened fields. + + if not isinstance(request, cloudbuild.RetryBuildRequest): + request = cloudbuild.RetryBuildRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.retry_build] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + def approve_build(self, + request: Optional[Union[cloudbuild.ApproveBuildRequest, dict]] = None, + *, + name: Optional[str] = None, + approval_result: Optional[cloudbuild.ApprovalResult] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Approves or rejects a pending build. + If approved, the returned LRO will be analogous to the + LRO returned from a CreateBuild call. + + If rejected, the returned LRO will be immediately done. + + ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_approve_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ApproveBuildRequest( + name="name_value", + ) + + # Make the request + operation = client.approve_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest, dict]): + The request object. Request to approve or reject a + pending build. + name (str): + Required. Name of the target build. For example: + "projects/{$project_id}/builds/{$build_id}" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + approval_result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): + Approval decision and metadata. + This corresponds to the ``approval_result`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, approval_result]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ApproveBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ApproveBuildRequest): + request = cloudbuild.ApproveBuildRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if approval_result is not None: + request.approval_result = approval_result + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.approve_build] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + def create_build_trigger(self, + request: Optional[Union[cloudbuild.CreateBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger: Optional[cloudbuild.BuildTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Creates a new ``BuildTrigger``. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_create_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.CreateBuildTriggerRequest( + project_id="project_id_value", + trigger=trigger, + ) + + # Make the request + response = client.create_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest, dict]): + The request object. Request to create a new ``BuildTrigger``. + project_id (str): + Required. ID of the project for which + to configure automatic builds. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): + Required. ``BuildTrigger`` to create. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, trigger]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CreateBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CreateBuildTriggerRequest): + request = cloudbuild.CreateBuildTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger is not None: + request.trigger = trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$') + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_build_trigger(self, + request: Optional[Union[cloudbuild.GetBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Returns information about a ``BuildTrigger``. + + This API is experimental. + + ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_get_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + response = client.get_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest, dict]): + The request object. Returns the ``BuildTrigger`` with the specified ID. + project_id (str): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. Identifier (``id`` or ``name``) of the + ``BuildTrigger`` to get. + + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetBuildTriggerRequest): + request = cloudbuild.GetBuildTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
+ return response + + def list_build_triggers(self, + request: Optional[Union[cloudbuild.ListBuildTriggersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildTriggersPager: + r"""Lists existing ``BuildTrigger``\ s. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_list_build_triggers(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildTriggersRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_build_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest, dict]): + The request object. Request to list existing ``BuildTriggers``. + project_id (str): + Required. ID of the project for which + to list BuildTriggers. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersPager: + Response containing existing BuildTriggers. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListBuildTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListBuildTriggersRequest): + request = cloudbuild.ListBuildTriggersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$') + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method.
+ response = pagers.ListBuildTriggersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_build_trigger(self, + request: Optional[Union[cloudbuild.DeleteBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_delete_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + client.delete_build_trigger(request=request) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest, dict]): + The request object. Request to delete a ``BuildTrigger``. + project_id (str): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. ID of the ``BuildTrigger`` to delete. 
+ This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.DeleteBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.DeleteBuildTriggerRequest): + request = cloudbuild.DeleteBuildTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_build_trigger] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request.
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_build_trigger(self, + request: Optional[Union[cloudbuild.UpdateBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + trigger: Optional[cloudbuild.BuildTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_update_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.UpdateBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=trigger, + ) + + # Make the request + response = client.update_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest, dict]): + The request object. Request to update an existing ``BuildTrigger``. + project_id (str): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. 
ID of the ``BuildTrigger`` to update. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): + Required. ``BuildTrigger`` to update. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id, trigger]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.UpdateBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.UpdateBuildTriggerRequest): + request = cloudbuild.UpdateBuildTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if trigger is not None: + request.trigger = trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$') + regex_match = routing_param_regex.match(request.trigger.resource_name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_build_trigger(self, + request: Optional[Union[cloudbuild.RunBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + source: Optional[cloudbuild.RepoSource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Runs a ``BuildTrigger`` at a particular source revision. + + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_run_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RunBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + operation = client.run_build_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest, dict]): + The request object. Specifies a build trigger to run and + the source to use. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. ID of the trigger. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): + Source to build against this trigger. + Branch and tag names cannot consist of + regular expressions. + + This corresponds to the ``source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id, source]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.RunBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.RunBuildTriggerRequest): + request = cloudbuild.RunBuildTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if source is not None: + request.source = source + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + def receive_trigger_webhook(self, + request: Optional[Union[cloudbuild.ReceiveTriggerWebhookRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ReceiveTriggerWebhookResponse: + r"""ReceiveTriggerWebhook [Experimental] is called when the API + receives a webhook request targeted at a specific trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_receive_trigger_webhook(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ReceiveTriggerWebhookRequest( + ) + + # Make the request + response = client.receive_trigger_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest, dict]): + The request object. ReceiveTriggerWebhookRequest [Experimental] is the + request object accepted by the ReceiveTriggerWebhook + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse: + ReceiveTriggerWebhookResponse [Experimental] is the response object for the + ReceiveTriggerWebhook method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ReceiveTriggerWebhookRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ReceiveTriggerWebhookRequest): + request = cloudbuild.ReceiveTriggerWebhookRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.receive_trigger_webhook] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger", request.trigger), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_worker_pool(self, + request: Optional[Union[cloudbuild.CreateWorkerPoolRequest, dict]] = None, + *, + parent: Optional[str] = None, + worker_pool: Optional[cloudbuild.WorkerPool] = None, + worker_pool_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_create_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateWorkerPoolRequest( + parent="parent_value", + worker_pool_id="worker_pool_id_value", + ) + + # Make the request + operation = client.create_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest, dict]): + The request object. Request to create a new ``WorkerPool``. + parent (str): + Required. The parent resource where this worker pool + will be created. 
Format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): + Required. ``WorkerPool`` resource to create. + This corresponds to the ``worker_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + worker_pool_id (str): + Required. Immutable. The ID to use for the + ``WorkerPool``, which will become the final component of + the resource name. + + This value should be 1-63 characters, and valid + characters are /[a-z][0-9]-/. + + This corresponds to the ``worker_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, worker_pool, worker_pool_id])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloudbuild.CreateWorkerPoolRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.CreateWorkerPoolRequest):
+ request = cloudbuild.CreateWorkerPoolRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if worker_pool is not None:
+ request.worker_pool = worker_pool
+ if worker_pool_id is not None:
+ request.worker_pool_id = worker_pool_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_worker_pool]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$')
+ regex_match = routing_param_regex.match(request.parent)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloudbuild.WorkerPool,
+ metadata_type=cloudbuild.CreateWorkerPoolOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response + + def get_worker_pool(self, + request: Optional[Union[cloudbuild.GetWorkerPoolRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: + r"""Returns details of a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_get_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetWorkerPoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_worker_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest, dict]): + The request object. Request to get a ``WorkerPool`` with the specified name. + name (str): + Required. The name of the ``WorkerPool`` to retrieve. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.WorkerPool: + Configuration for a WorkerPool. 
+ + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetWorkerPoolRequest): + request = cloudbuild.GetWorkerPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_worker_pool]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/workerPools/[^/]+$')
+ regex_match = routing_param_regex.match(request.name)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_worker_pool(self,
+ request: Optional[Union[cloudbuild.DeleteWorkerPoolRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Deletes a ``WorkerPool``.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_delete_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteWorkerPoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest, dict]): + The request object. Request to delete a ``WorkerPool``. + name (str): + Required. The name of the ``WorkerPool`` to delete. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloudbuild.DeleteWorkerPoolRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.DeleteWorkerPoolRequest):
+ request = cloudbuild.DeleteWorkerPoolRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_worker_pool]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/workerPools/[^/]+$')
+ regex_match = routing_param_regex.match(request.name)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ empty_pb2.Empty,
+ metadata_type=cloudbuild.DeleteWorkerPoolOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response + + def update_worker_pool(self, + request: Optional[Union[cloudbuild.UpdateWorkerPoolRequest, dict]] = None, + *, + worker_pool: Optional[cloudbuild.WorkerPool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_update_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.UpdateWorkerPoolRequest( + ) + + # Make the request + operation = client.update_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest, dict]): + The request object. Request to update a ``WorkerPool``. + worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): + Required. The ``WorkerPool`` to update. + + The ``name`` field is used to identify the + ``WorkerPool`` to update. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``worker_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask specifying which fields in ``worker_pool`` to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([worker_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.UpdateWorkerPoolRequest. 
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.UpdateWorkerPoolRequest):
+ request = cloudbuild.UpdateWorkerPoolRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if worker_pool is not None:
+ request.worker_pool = worker_pool
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_worker_pool]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/workerPools/[^/]+$')
+ regex_match = routing_param_regex.match(request.worker_pool.name)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloudbuild.WorkerPool,
+ metadata_type=cloudbuild.UpdateWorkerPoolOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_worker_pools(self,
+ request: Optional[Union[cloudbuild.ListWorkerPoolsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListWorkerPoolsPager:
+ r"""Lists ``WorkerPool``\ s.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_list_worker_pools(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListWorkerPoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_worker_pools(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest, dict]): + The request object. Request to list ``WorkerPool``\ s. + parent (str): + Required. The parent of the collection of + ``WorkerPools``. Format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsPager: + Response containing existing WorkerPools. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloudbuild.ListWorkerPoolsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.ListWorkerPoolsRequest):
+ request = cloudbuild.ListWorkerPoolsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_worker_pools]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$')
+ regex_match = routing_param_regex.match(request.parent)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListWorkerPoolsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def __enter__(self) -> "CloudBuildClient":
+ return self
+
+ def __exit__(self, type, value, traceback):
+ """Releases underlying transport's resources.
+
+ .. warning::
+ ONLY use as a context manager if the transport is NOT shared
+ with other clients! Exiting the with block will CLOSE the transport
+ and may cause errors in other clients!
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CloudBuildClient", +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py new file mode 100644 index 00000000..f255025e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild + + +class ListBuildsPager: + """A pager for iterating through ``list_builds`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``builds`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBuilds`` requests and continue to iterate + through the ``builds`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudbuild.ListBuildsResponse], + request: cloudbuild.ListBuildsRequest, + response: cloudbuild.ListBuildsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudbuild.ListBuildsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudbuild.ListBuildsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cloudbuild.Build]: + for page in self.pages: + yield from page.builds + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBuildsAsyncPager: + """A pager for iterating through ``list_builds`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``builds`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBuilds`` requests and continue to iterate + through the ``builds`` field on the + corresponding responses. 
class ListBuildsAsyncPager:
    """Async pager for ``list_builds`` requests.

    Thinly wraps an initial
    :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` and
    provides an ``__aiter__`` method that walks the ``builds`` field, issuing
    further ``ListBuilds`` requests whenever a page is exhausted.

    All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse`
    attributes are available on the pager; attribute lookup always reflects
    the most recently fetched response.
    """

    def __init__(self,
            method: Callable[..., Awaitable[cloudbuild.ListBuildsResponse]],
            request: cloudbuild.ListBuildsRequest,
            response: cloudbuild.ListBuildsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which is re-invoked to fetch subsequent pages.
            request (google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest):
                The initial request object.
            response (google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = cloudbuild.ListBuildsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[cloudbuild.ListBuildsResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[cloudbuild.Build]:
        async def _flatten():
            async for page in self.pages:
                for build in page.builds:
                    yield build

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListBuildTriggersPager:
    """Pager for ``list_build_triggers`` requests.

    Thinly wraps an initial
    :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse`
    and provides an ``__iter__`` method that walks the ``triggers`` field,
    issuing further ``ListBuildTriggers`` requests whenever a page is
    exhausted.

    All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse`
    attributes are available on the pager; attribute lookup always reflects
    the most recently fetched response.
    """

    def __init__(self,
            method: Callable[..., cloudbuild.ListBuildTriggersResponse],
            request: cloudbuild.ListBuildTriggersRequest,
            response: cloudbuild.ListBuildTriggersResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which is re-invoked to fetch subsequent pages.
            request (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest):
                The initial request object.
            response (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = cloudbuild.ListBuildTriggersRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[cloudbuild.ListBuildTriggersResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[cloudbuild.BuildTrigger]:
        for page in self.pages:
            for trigger in page.triggers:
                yield trigger

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListBuildTriggersAsyncPager:
    """Async pager for ``list_build_triggers`` requests.

    Thinly wraps an initial
    :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse`
    and provides an ``__aiter__`` method that walks the ``triggers`` field,
    issuing further ``ListBuildTriggers`` requests whenever a page is
    exhausted.

    All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse`
    attributes are available on the pager; attribute lookup always reflects
    the most recently fetched response.
    """

    def __init__(self,
            method: Callable[..., Awaitable[cloudbuild.ListBuildTriggersResponse]],
            request: cloudbuild.ListBuildTriggersRequest,
            response: cloudbuild.ListBuildTriggersResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which is re-invoked to fetch subsequent pages.
            request (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest):
                The initial request object.
            response (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = cloudbuild.ListBuildTriggersRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[cloudbuild.ListBuildTriggersResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[cloudbuild.BuildTrigger]:
        async def _flatten():
            async for page in self.pages:
                for trigger in page.triggers:
                    yield trigger

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListWorkerPoolsPager:
    """Pager for ``list_worker_pools`` requests.

    Thinly wraps an initial
    :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse`
    and provides an ``__iter__`` method that walks the ``worker_pools`` field,
    issuing further ``ListWorkerPools`` requests whenever a page is exhausted.

    All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse`
    attributes are available on the pager; attribute lookup always reflects
    the most recently fetched response.
    """

    def __init__(self,
            method: Callable[..., cloudbuild.ListWorkerPoolsResponse],
            request: cloudbuild.ListWorkerPoolsRequest,
            response: cloudbuild.ListWorkerPoolsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which is re-invoked to fetch subsequent pages.
            request (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest):
                The initial request object.
            response (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = cloudbuild.ListWorkerPoolsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[cloudbuild.ListWorkerPoolsResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[cloudbuild.WorkerPool]:
        for page in self.pages:
            for pool in page.worker_pools:
                yield pool

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListWorkerPoolsAsyncPager:
    """Async pager for ``list_worker_pools`` requests.

    Thinly wraps an initial
    :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse`
    and provides an ``__aiter__`` method that walks the ``worker_pools``
    field, issuing further ``ListWorkerPools`` requests whenever a page is
    exhausted.

    All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse`
    attributes are available on the pager; attribute lookup always reflects
    the most recently fetched response.
    """

    def __init__(self,
            method: Callable[..., Awaitable[cloudbuild.ListWorkerPoolsResponse]],
            request: cloudbuild.ListWorkerPoolsRequest,
            response: cloudbuild.ListWorkerPoolsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which is re-invoked to fetch subsequent pages.
            request (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest):
                The initial request object.
            response (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = cloudbuild.ListWorkerPoolsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[cloudbuild.ListWorkerPoolsResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[cloudbuild.WorkerPool]:
        async def _flatten():
            async for page in self.pages:
                for pool in page.worker_pools:
                    yield pool

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import CloudBuildTransport
from .grpc import CloudBuildGrpcTransport
from .grpc_asyncio import CloudBuildGrpcAsyncIOTransport
from .rest import CloudBuildRestTransport
from .rest import CloudBuildRestInterceptor


# Registry of concrete transport implementations, keyed by transport name.
_transport_registry: Dict[str, Type[CloudBuildTransport]] = OrderedDict((
    ('grpc', CloudBuildGrpcTransport),
    ('grpc_asyncio', CloudBuildGrpcAsyncIOTransport),
    ('rest', CloudBuildRestTransport),
))

__all__ = (
    'CloudBuildTransport',
    'CloudBuildGrpcTransport',
    'CloudBuildGrpcAsyncIOTransport',
    'CloudBuildRestTransport',
    'CloudBuildRestInterceptor',
)
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.devtools.cloudbuild_v1.types import cloudbuild
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__,
)


class CloudBuildTransport(abc.ABC):
    """Abstract transport class for CloudBuild.

    Concrete subclasses (gRPC, gRPC asyncio, REST) implement the RPC
    properties declared below; this base class handles credential
    resolution and per-method retry/timeout defaults.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'cloudbuild.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # Credentials resolution: explicit file, explicit object, or
        # application default — in that order. File and object together are
        # ambiguous and rejected.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        """Precompute the wrapped methods with their retry/timeout defaults."""
        # The idempotent read/delete methods share one retry policy.  Build a
        # fresh Retry per method so each wrapped method owns its own instance.
        def _read_retry():
            return retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=600.0,
            )

        self._wrapped_methods = {
            self.create_build: gapic_v1.method.wrap_method(
                self.create_build,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.get_build: gapic_v1.method.wrap_method(
                self.get_build,
                default_retry=_read_retry(),
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.list_builds: gapic_v1.method.wrap_method(
                self.list_builds,
                default_retry=_read_retry(),
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.cancel_build: gapic_v1.method.wrap_method(
                self.cancel_build,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.retry_build: gapic_v1.method.wrap_method(
                self.retry_build,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.approve_build: gapic_v1.method.wrap_method(
                self.approve_build,
                default_timeout=None,
                client_info=client_info,
            ),
            self.create_build_trigger: gapic_v1.method.wrap_method(
                self.create_build_trigger,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.get_build_trigger: gapic_v1.method.wrap_method(
                self.get_build_trigger,
                default_retry=_read_retry(),
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.list_build_triggers: gapic_v1.method.wrap_method(
                self.list_build_triggers,
                default_retry=_read_retry(),
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.delete_build_trigger: gapic_v1.method.wrap_method(
                self.delete_build_trigger,
                default_retry=_read_retry(),
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.update_build_trigger: gapic_v1.method.wrap_method(
                self.update_build_trigger,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.run_build_trigger: gapic_v1.method.wrap_method(
                self.run_build_trigger,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.receive_trigger_webhook: gapic_v1.method.wrap_method(
                self.receive_trigger_webhook,
                default_timeout=None,
                client_info=client_info,
            ),
            self.create_worker_pool: gapic_v1.method.wrap_method(
                self.create_worker_pool,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.get_worker_pool: gapic_v1.method.wrap_method(
                self.get_worker_pool,
                default_retry=_read_retry(),
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.delete_worker_pool: gapic_v1.method.wrap_method(
                self.delete_worker_pool,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.update_worker_pool: gapic_v1.method.wrap_method(
                self.update_worker_pool,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.list_worker_pools: gapic_v1.method.wrap_method(
                self.list_worker_pools,
                default_retry=_read_retry(),
                default_timeout=600.0,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    @property
    def create_build(self) -> Callable[
            [cloudbuild.CreateBuildRequest],
            Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]:
        raise NotImplementedError()

    @property
    def get_build(self) -> Callable[
            [cloudbuild.GetBuildRequest],
            Union[cloudbuild.Build, Awaitable[cloudbuild.Build]]]:
        raise NotImplementedError()

    @property
    def list_builds(self) -> Callable[
            [cloudbuild.ListBuildsRequest],
            Union[cloudbuild.ListBuildsResponse, Awaitable[cloudbuild.ListBuildsResponse]]]:
        raise NotImplementedError()

    @property
    def cancel_build(self) -> Callable[
            [cloudbuild.CancelBuildRequest],
            Union[cloudbuild.Build, Awaitable[cloudbuild.Build]]]:
        raise NotImplementedError()

    @property
    def retry_build(self) -> Callable[
            [cloudbuild.RetryBuildRequest],
            Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]:
        raise NotImplementedError()

    @property
    def approve_build(self) -> Callable[
            [cloudbuild.ApproveBuildRequest],
            Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]:
        raise NotImplementedError()

    @property
    def create_build_trigger(self) -> Callable[
            [cloudbuild.CreateBuildTriggerRequest],
            Union[cloudbuild.BuildTrigger, Awaitable[cloudbuild.BuildTrigger]]]:
        raise NotImplementedError()

    @property
    def get_build_trigger(self) -> Callable[
            [cloudbuild.GetBuildTriggerRequest],
            Union[cloudbuild.BuildTrigger, Awaitable[cloudbuild.BuildTrigger]]]:
        raise NotImplementedError()

    @property
    def list_build_triggers(self) -> Callable[
            [cloudbuild.ListBuildTriggersRequest],
            Union[cloudbuild.ListBuildTriggersResponse, Awaitable[cloudbuild.ListBuildTriggersResponse]]]:
        raise NotImplementedError()

    @property
    def delete_build_trigger(self) -> Callable[
            [cloudbuild.DeleteBuildTriggerRequest],
            Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]]:
        raise NotImplementedError()

    @property
    def update_build_trigger(self) -> Callable[
            [cloudbuild.UpdateBuildTriggerRequest],
            Union[cloudbuild.BuildTrigger, Awaitable[cloudbuild.BuildTrigger]]]:
        raise NotImplementedError()

    @property
    def run_build_trigger(self) -> Callable[
            [cloudbuild.RunBuildTriggerRequest],
            Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]:
        raise NotImplementedError()

    @property
    def receive_trigger_webhook(self) -> Callable[
            [cloudbuild.ReceiveTriggerWebhookRequest],
            Union[cloudbuild.ReceiveTriggerWebhookResponse, Awaitable[cloudbuild.ReceiveTriggerWebhookResponse]]]:
        raise NotImplementedError()

    @property
    def create_worker_pool(self) -> Callable[
            [cloudbuild.CreateWorkerPoolRequest],
            Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]:
        raise NotImplementedError()

    @property
    def get_worker_pool(self) -> Callable[
            [cloudbuild.GetWorkerPoolRequest],
            Union[cloudbuild.WorkerPool, Awaitable[cloudbuild.WorkerPool]]]:
        raise NotImplementedError()

    @property
    def delete_worker_pool(self) -> Callable[
            [cloudbuild.DeleteWorkerPoolRequest],
            Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]:
        raise NotImplementedError()

    @property
    def update_worker_pool(self) -> Callable[
            [cloudbuild.UpdateWorkerPoolRequest],
            Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]:
        raise NotImplementedError()

    @property
    def list_worker_pools(self) -> Callable[
            [cloudbuild.ListWorkerPoolsRequest],
            Union[cloudbuild.ListWorkerPoolsResponse, Awaitable[cloudbuild.ListWorkerPoolsResponse]]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()


__all__ = (
    'CloudBuildTransport',
)
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO + + +class CloudBuildGrpcTransport(CloudBuildTransport): + """gRPC backend transport for CloudBuild. + + Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self._operations_client + + @property + def create_build(self) -> Callable[ + [cloudbuild.CreateBuildRequest], + operations_pb2.Operation]: + r"""Return a callable for the create build method over gRPC. + + Starts a build with the specified configuration. + + This method returns a long-running ``Operation``, which includes + the build ID. Pass the build ID to ``GetBuild`` to determine the + build status (such as ``SUCCESS`` or ``FAILURE``). + + Returns: + Callable[[~.CreateBuildRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_build' not in self._stubs: + self._stubs['create_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', + request_serializer=cloudbuild.CreateBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_build'] + + @property + def get_build(self) -> Callable[ + [cloudbuild.GetBuildRequest], + cloudbuild.Build]: + r"""Return a callable for the get build method over gRPC. + + Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as + ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing + information. + + Returns: + Callable[[~.GetBuildRequest], + ~.Build]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_build' not in self._stubs: + self._stubs['get_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', + request_serializer=cloudbuild.GetBuildRequest.serialize, + response_deserializer=cloudbuild.Build.deserialize, + ) + return self._stubs['get_build'] + + @property + def list_builds(self) -> Callable[ + [cloudbuild.ListBuildsRequest], + cloudbuild.ListBuildsResponse]: + r"""Return a callable for the list builds method over gRPC. + + Lists previously requested builds. + Previously requested builds may still be in-progress, or + may have finished successfully or unsuccessfully. + + Returns: + Callable[[~.ListBuildsRequest], + ~.ListBuildsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_builds' not in self._stubs: + self._stubs['list_builds'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', + request_serializer=cloudbuild.ListBuildsRequest.serialize, + response_deserializer=cloudbuild.ListBuildsResponse.deserialize, + ) + return self._stubs['list_builds'] + + @property + def cancel_build(self) -> Callable[ + [cloudbuild.CancelBuildRequest], + cloudbuild.Build]: + r"""Return a callable for the cancel build method over gRPC. + + Cancels a build in progress. + + Returns: + Callable[[~.CancelBuildRequest], + ~.Build]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'cancel_build' not in self._stubs: + self._stubs['cancel_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', + request_serializer=cloudbuild.CancelBuildRequest.serialize, + response_deserializer=cloudbuild.Build.deserialize, + ) + return self._stubs['cancel_build'] + + @property + def retry_build(self) -> Callable[ + [cloudbuild.RetryBuildRequest], + operations_pb2.Operation]: + r"""Return a callable for the retry build method over gRPC. + + Creates a new build based on the specified build. + + This method creates a new build using the original build + request, which may or may not result in an identical build. + + For triggered builds: + + - Triggered builds resolve to a precise revision; therefore a + retry of a triggered build will result in a build that uses + the same revision. + + For non-triggered builds that specify ``RepoSource``: + + - If the original build built from the tip of a branch, the + retried build will build from the tip of that branch, which + may not be the same revision as the original build. + - If the original build specified a commit sha or revision ID, + the retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Cloud Storage + without specifying the generation of the object, the new + build will use the current object, which may be different + from the original build source. + - If the original build pulled source from Cloud Storage and + specified the generation of the object, the new build will + attempt to use the same object, which may or may not be + available depending on the bucket's lifecycle management + settings. + + Returns: + Callable[[~.RetryBuildRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'retry_build' not in self._stubs: + self._stubs['retry_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', + request_serializer=cloudbuild.RetryBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['retry_build'] + + @property + def approve_build(self) -> Callable[ + [cloudbuild.ApproveBuildRequest], + operations_pb2.Operation]: + r"""Return a callable for the approve build method over gRPC. + + Approves or rejects a pending build. + If approved, the returned LRO will be analogous to the + LRO returned from a CreateBuild call. + + If rejected, the returned LRO will be immediately done. + + Returns: + Callable[[~.ApproveBuildRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'approve_build' not in self._stubs: + self._stubs['approve_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ApproveBuild', + request_serializer=cloudbuild.ApproveBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['approve_build'] + + @property + def create_build_trigger(self) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], + cloudbuild.BuildTrigger]: + r"""Return a callable for the create build trigger method over gRPC. + + Creates a new ``BuildTrigger``. + + This API is experimental. + + Returns: + Callable[[~.CreateBuildTriggerRequest], + ~.BuildTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_build_trigger' not in self._stubs: + self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', + request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['create_build_trigger'] + + @property + def get_build_trigger(self) -> Callable[ + [cloudbuild.GetBuildTriggerRequest], + cloudbuild.BuildTrigger]: + r"""Return a callable for the get build trigger method over gRPC. + + Returns information about a ``BuildTrigger``. + + This API is experimental. + + Returns: + Callable[[~.GetBuildTriggerRequest], + ~.BuildTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_build_trigger' not in self._stubs: + self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', + request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['get_build_trigger'] + + @property + def list_build_triggers(self) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + cloudbuild.ListBuildTriggersResponse]: + r"""Return a callable for the list build triggers method over gRPC. + + Lists existing ``BuildTrigger``\ s. + + This API is experimental. + + Returns: + Callable[[~.ListBuildTriggersRequest], + ~.ListBuildTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_build_triggers' not in self._stubs: + self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', + request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, + response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, + ) + return self._stubs['list_build_triggers'] + + @property + def delete_build_trigger(self) -> Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete build trigger method over gRPC. + + Deletes a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Returns: + Callable[[~.DeleteBuildTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_build_trigger' not in self._stubs: + self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', + request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_build_trigger'] + + @property + def update_build_trigger(self) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + cloudbuild.BuildTrigger]: + r"""Return a callable for the update build trigger method over gRPC. + + Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Returns: + Callable[[~.UpdateBuildTriggerRequest], + ~.BuildTrigger]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_build_trigger' not in self._stubs: + self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', + request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['update_build_trigger'] + + @property + def run_build_trigger(self) -> Callable[ + [cloudbuild.RunBuildTriggerRequest], + operations_pb2.Operation]: + r"""Return a callable for the run build trigger method over gRPC. + + Runs a ``BuildTrigger`` at a particular source revision. + + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + + Returns: + Callable[[~.RunBuildTriggerRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_build_trigger' not in self._stubs: + self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', + request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['run_build_trigger'] + + @property + def receive_trigger_webhook(self) -> Callable[ + [cloudbuild.ReceiveTriggerWebhookRequest], + cloudbuild.ReceiveTriggerWebhookResponse]: + r"""Return a callable for the receive trigger webhook method over gRPC. + + ReceiveTriggerWebhook [Experimental] is called when the API + receives a webhook request targeted at a specific trigger. + + Returns: + Callable[[~.ReceiveTriggerWebhookRequest], + ~.ReceiveTriggerWebhookResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'receive_trigger_webhook' not in self._stubs: + self._stubs['receive_trigger_webhook'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ReceiveTriggerWebhook', + request_serializer=cloudbuild.ReceiveTriggerWebhookRequest.serialize, + response_deserializer=cloudbuild.ReceiveTriggerWebhookResponse.deserialize, + ) + return self._stubs['receive_trigger_webhook'] + + @property + def create_worker_pool(self) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], + operations_pb2.Operation]: + r"""Return a callable for the create worker pool method over gRPC. + + Creates a ``WorkerPool``. + + Returns: + Callable[[~.CreateWorkerPoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_worker_pool' not in self._stubs: + self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', + request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_worker_pool'] + + @property + def get_worker_pool(self) -> Callable[ + [cloudbuild.GetWorkerPoolRequest], + cloudbuild.WorkerPool]: + r"""Return a callable for the get worker pool method over gRPC. + + Returns details of a ``WorkerPool``. + + Returns: + Callable[[~.GetWorkerPoolRequest], + ~.WorkerPool]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_worker_pool' not in self._stubs: + self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', + request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, + response_deserializer=cloudbuild.WorkerPool.deserialize, + ) + return self._stubs['get_worker_pool'] + + @property + def delete_worker_pool(self) -> Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete worker pool method over gRPC. + + Deletes a ``WorkerPool``. + + Returns: + Callable[[~.DeleteWorkerPoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_worker_pool' not in self._stubs: + self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', + request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_worker_pool'] + + @property + def update_worker_pool(self) -> Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + operations_pb2.Operation]: + r"""Return a callable for the update worker pool method over gRPC. + + Updates a ``WorkerPool``. + + Returns: + Callable[[~.UpdateWorkerPoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_worker_pool' not in self._stubs: + self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', + request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_worker_pool'] + + @property + def list_worker_pools(self) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], + cloudbuild.ListWorkerPoolsResponse]: + r"""Return a callable for the list worker pools method over gRPC. + + Lists ``WorkerPool``\ s. + + Returns: + Callable[[~.ListWorkerPoolsRequest], + ~.ListWorkerPoolsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_worker_pools' not in self._stubs: + self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', + request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, + response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, + ) + return self._stubs['list_worker_pools'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'CloudBuildGrpcTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py new file mode 100644 index 00000000..e9f45881 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -0,0 +1,792 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudBuildGrpcTransport + + +class CloudBuildGrpcAsyncIOTransport(CloudBuildTransport): + """gRPC AsyncIO backend transport for CloudBuild. + + Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_build(self) -> Callable[ + [cloudbuild.CreateBuildRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create build method over gRPC. + + Starts a build with the specified configuration. + + This method returns a long-running ``Operation``, which includes + the build ID. Pass the build ID to ``GetBuild`` to determine the + build status (such as ``SUCCESS`` or ``FAILURE``). + + Returns: + Callable[[~.CreateBuildRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_build' not in self._stubs: + self._stubs['create_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', + request_serializer=cloudbuild.CreateBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_build'] + + @property + def get_build(self) -> Callable[ + [cloudbuild.GetBuildRequest], + Awaitable[cloudbuild.Build]]: + r"""Return a callable for the get build method over gRPC. + + Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as + ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing + information. + + Returns: + Callable[[~.GetBuildRequest], + Awaitable[~.Build]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_build' not in self._stubs: + self._stubs['get_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', + request_serializer=cloudbuild.GetBuildRequest.serialize, + response_deserializer=cloudbuild.Build.deserialize, + ) + return self._stubs['get_build'] + + @property + def list_builds(self) -> Callable[ + [cloudbuild.ListBuildsRequest], + Awaitable[cloudbuild.ListBuildsResponse]]: + r"""Return a callable for the list builds method over gRPC. + + Lists previously requested builds. + Previously requested builds may still be in-progress, or + may have finished successfully or unsuccessfully. + + Returns: + Callable[[~.ListBuildsRequest], + Awaitable[~.ListBuildsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_builds' not in self._stubs: + self._stubs['list_builds'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', + request_serializer=cloudbuild.ListBuildsRequest.serialize, + response_deserializer=cloudbuild.ListBuildsResponse.deserialize, + ) + return self._stubs['list_builds'] + + @property + def cancel_build(self) -> Callable[ + [cloudbuild.CancelBuildRequest], + Awaitable[cloudbuild.Build]]: + r"""Return a callable for the cancel build method over gRPC. + + Cancels a build in progress. + + Returns: + Callable[[~.CancelBuildRequest], + Awaitable[~.Build]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'cancel_build' not in self._stubs: + self._stubs['cancel_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', + request_serializer=cloudbuild.CancelBuildRequest.serialize, + response_deserializer=cloudbuild.Build.deserialize, + ) + return self._stubs['cancel_build'] + + @property + def retry_build(self) -> Callable[ + [cloudbuild.RetryBuildRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the retry build method over gRPC. + + Creates a new build based on the specified build. + + This method creates a new build using the original build + request, which may or may not result in an identical build. + + For triggered builds: + + - Triggered builds resolve to a precise revision; therefore a + retry of a triggered build will result in a build that uses + the same revision. + + For non-triggered builds that specify ``RepoSource``: + + - If the original build built from the tip of a branch, the + retried build will build from the tip of that branch, which + may not be the same revision as the original build. + - If the original build specified a commit sha or revision ID, + the retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Cloud Storage + without specifying the generation of the object, the new + build will use the current object, which may be different + from the original build source. + - If the original build pulled source from Cloud Storage and + specified the generation of the object, the new build will + attempt to use the same object, which may or may not be + available depending on the bucket's lifecycle management + settings. + + Returns: + Callable[[~.RetryBuildRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'retry_build' not in self._stubs: + self._stubs['retry_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', + request_serializer=cloudbuild.RetryBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['retry_build'] + + @property + def approve_build(self) -> Callable[ + [cloudbuild.ApproveBuildRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the approve build method over gRPC. + + Approves or rejects a pending build. + If approved, the returned LRO will be analogous to the + LRO returned from a CreateBuild call. + + If rejected, the returned LRO will be immediately done. + + Returns: + Callable[[~.ApproveBuildRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'approve_build' not in self._stubs: + self._stubs['approve_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ApproveBuild', + request_serializer=cloudbuild.ApproveBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['approve_build'] + + @property + def create_build_trigger(self) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], + Awaitable[cloudbuild.BuildTrigger]]: + r"""Return a callable for the create build trigger method over gRPC. + + Creates a new ``BuildTrigger``. + + This API is experimental. + + Returns: + Callable[[~.CreateBuildTriggerRequest], + Awaitable[~.BuildTrigger]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_build_trigger' not in self._stubs: + self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', + request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['create_build_trigger'] + + @property + def get_build_trigger(self) -> Callable[ + [cloudbuild.GetBuildTriggerRequest], + Awaitable[cloudbuild.BuildTrigger]]: + r"""Return a callable for the get build trigger method over gRPC. + + Returns information about a ``BuildTrigger``. + + This API is experimental. + + Returns: + Callable[[~.GetBuildTriggerRequest], + Awaitable[~.BuildTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_build_trigger' not in self._stubs: + self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', + request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['get_build_trigger'] + + @property + def list_build_triggers(self) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + Awaitable[cloudbuild.ListBuildTriggersResponse]]: + r"""Return a callable for the list build triggers method over gRPC. + + Lists existing ``BuildTrigger``\ s. + + This API is experimental. 
+ + Returns: + Callable[[~.ListBuildTriggersRequest], + Awaitable[~.ListBuildTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_build_triggers' not in self._stubs: + self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', + request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, + response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, + ) + return self._stubs['list_build_triggers'] + + @property + def delete_build_trigger(self) -> Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete build trigger method over gRPC. + + Deletes a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Returns: + Callable[[~.DeleteBuildTriggerRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_build_trigger' not in self._stubs: + self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', + request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_build_trigger'] + + @property + def update_build_trigger(self) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + Awaitable[cloudbuild.BuildTrigger]]: + r"""Return a callable for the update build trigger method over gRPC. 
+ + Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Returns: + Callable[[~.UpdateBuildTriggerRequest], + Awaitable[~.BuildTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_build_trigger' not in self._stubs: + self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', + request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['update_build_trigger'] + + @property + def run_build_trigger(self) -> Callable[ + [cloudbuild.RunBuildTriggerRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the run build trigger method over gRPC. + + Runs a ``BuildTrigger`` at a particular source revision. + + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + + Returns: + Callable[[~.RunBuildTriggerRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_build_trigger' not in self._stubs: + self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', + request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['run_build_trigger'] + + @property + def receive_trigger_webhook(self) -> Callable[ + [cloudbuild.ReceiveTriggerWebhookRequest], + Awaitable[cloudbuild.ReceiveTriggerWebhookResponse]]: + r"""Return a callable for the receive trigger webhook method over gRPC. + + ReceiveTriggerWebhook [Experimental] is called when the API + receives a webhook request targeted at a specific trigger. + + Returns: + Callable[[~.ReceiveTriggerWebhookRequest], + Awaitable[~.ReceiveTriggerWebhookResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'receive_trigger_webhook' not in self._stubs: + self._stubs['receive_trigger_webhook'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ReceiveTriggerWebhook', + request_serializer=cloudbuild.ReceiveTriggerWebhookRequest.serialize, + response_deserializer=cloudbuild.ReceiveTriggerWebhookResponse.deserialize, + ) + return self._stubs['receive_trigger_webhook'] + + @property + def create_worker_pool(self) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create worker pool method over gRPC. + + Creates a ``WorkerPool``. + + Returns: + Callable[[~.CreateWorkerPoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_worker_pool' not in self._stubs: + self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', + request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_worker_pool'] + + @property + def get_worker_pool(self) -> Callable[ + [cloudbuild.GetWorkerPoolRequest], + Awaitable[cloudbuild.WorkerPool]]: + r"""Return a callable for the get worker pool method over gRPC. + + Returns details of a ``WorkerPool``. + + Returns: + Callable[[~.GetWorkerPoolRequest], + Awaitable[~.WorkerPool]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_worker_pool' not in self._stubs: + self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', + request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, + response_deserializer=cloudbuild.WorkerPool.deserialize, + ) + return self._stubs['get_worker_pool'] + + @property + def delete_worker_pool(self) -> Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete worker pool method over gRPC. + + Deletes a ``WorkerPool``. + + Returns: + Callable[[~.DeleteWorkerPoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_worker_pool' not in self._stubs: + self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', + request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_worker_pool'] + + @property + def update_worker_pool(self) -> Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update worker pool method over gRPC. + + Updates a ``WorkerPool``. + + Returns: + Callable[[~.UpdateWorkerPoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_worker_pool' not in self._stubs: + self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', + request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_worker_pool'] + + @property + def list_worker_pools(self) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], + Awaitable[cloudbuild.ListWorkerPoolsResponse]]: + r"""Return a callable for the list worker pools method over gRPC. + + Lists ``WorkerPool``\ s. + + Returns: + Callable[[~.ListWorkerPoolsRequest], + Awaitable[~.ListWorkerPoolsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_worker_pools' not in self._stubs: + self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', + request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, + response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, + ) + return self._stubs['list_worker_pools'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'CloudBuildGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py new file mode 100644 index 00000000..8d6d8492 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py @@ -0,0 +1,2419 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudBuildRestInterceptor: + """Interceptor for CloudBuild. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudBuildRestTransport. + + .. 
code-block:: python + class MyCustomCloudBuildInterceptor(CloudBuildRestInterceptor): + def pre_approve_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_approve_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_get_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_builds(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_builds(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_build_triggers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_build_triggers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_worker_pools(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_worker_pools(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_receive_trigger_webhook(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_receive_trigger_webhook(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retry_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retry_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_build_trigger(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_update_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudBuildRestTransport(interceptor=MyCustomCloudBuildInterceptor()) + client = CloudBuildClient(transport=transport) + + + """ + def pre_approve_build(self, request: cloudbuild.ApproveBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ApproveBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for approve_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_approve_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for approve_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_cancel_build(self, request: cloudbuild.CancelBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CancelBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_cancel_build(self, response: cloudbuild.Build) -> cloudbuild.Build: + """Post-rpc interceptor for cancel_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + def pre_create_build(self, request: cloudbuild.CreateBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_create_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_create_build_trigger(self, request: cloudbuild.CreateBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_create_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for create_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_create_worker_pool(self, request: cloudbuild.CreateWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_create_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_delete_build_trigger(self, request: cloudbuild.DeleteBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.DeleteBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def pre_delete_worker_pool(self, request: cloudbuild.DeleteWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.DeleteWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_delete_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_get_build(self, request: cloudbuild.GetBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_get_build(self, response: cloudbuild.Build) -> cloudbuild.Build: + """Post-rpc interceptor for get_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_get_build_trigger(self, request: cloudbuild.GetBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_get_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for get_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_get_worker_pool(self, request: cloudbuild.GetWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_get_worker_pool(self, response: cloudbuild.WorkerPool) -> cloudbuild.WorkerPool: + """Post-rpc interceptor for get_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + def pre_list_builds(self, request: cloudbuild.ListBuildsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListBuildsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_builds + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_list_builds(self, response: cloudbuild.ListBuildsResponse) -> cloudbuild.ListBuildsResponse: + """Post-rpc interceptor for list_builds + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_list_build_triggers(self, request: cloudbuild.ListBuildTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListBuildTriggersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_build_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_list_build_triggers(self, response: cloudbuild.ListBuildTriggersResponse) -> cloudbuild.ListBuildTriggersResponse: + """Post-rpc interceptor for list_build_triggers + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_list_worker_pools(self, request: cloudbuild.ListWorkerPoolsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListWorkerPoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_worker_pools + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_list_worker_pools(self, response: cloudbuild.ListWorkerPoolsResponse) -> cloudbuild.ListWorkerPoolsResponse: + """Post-rpc interceptor for list_worker_pools + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_receive_trigger_webhook(self, request: cloudbuild.ReceiveTriggerWebhookRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ReceiveTriggerWebhookRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for receive_trigger_webhook + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_receive_trigger_webhook(self, response: cloudbuild.ReceiveTriggerWebhookResponse) -> cloudbuild.ReceiveTriggerWebhookResponse: + """Post-rpc interceptor for receive_trigger_webhook + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_retry_build(self, request: cloudbuild.RetryBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.RetryBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retry_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_retry_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for retry_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + def pre_run_build_trigger(self, request: cloudbuild.RunBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.RunBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_run_build_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for run_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_update_build_trigger(self, request: cloudbuild.UpdateBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.UpdateBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_update_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for update_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_update_worker_pool(self, request: cloudbuild.UpdateWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.UpdateWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_update_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudBuildRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudBuildRestInterceptor + + +class CloudBuildRestTransport(CloudBuildTransport): + """REST backend transport for CloudBuild. + + Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[CloudBuildRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CloudBuildRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=operations/**}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=operations/**}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _ApproveBuild(CloudBuildRestStub): + def __hash__(self): + return hash("ApproveBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.ApproveBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the approve build method over HTTP. + + Args: + request (~.cloudbuild.ApproveBuildRequest): + The request object. Request to approve or reject a + pending build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/builds/*}:approve', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/builds/*}:approve', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_approve_build(request, metadata) + pb_request = cloudbuild.ApproveBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_approve_build(resp) + return resp + + class _CancelBuild(CloudBuildRestStub): + def __hash__(self): + return hash("CancelBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.CancelBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.Build: + r"""Call the cancel build method over HTTP. + + Args: + request (~.cloudbuild.CancelBuildRequest): + The request object. Request to cancel an ongoing build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.Build: + A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find + source code, how to build it (for example, the builder + image to run on the source), and where to store the + built artifacts. + + Fields can include the following variables, which will + be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified by + RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. 
+ - $REVISION_ID or $COMMIT_SHA: the commit SHA specified + by RepoSource or resolved from the specified branch + or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/builds/{id}:cancel', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/builds/*}:cancel', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_cancel_build(request, metadata) + pb_request = cloudbuild.CancelBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.Build() + pb_resp = cloudbuild.Build.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_build(resp) + return resp + + class _CreateBuild(CloudBuildRestStub): + def __hash__(self): + return hash("CreateBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.CreateBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create build method over HTTP. + + Args: + request (~.cloudbuild.CreateBuildRequest): + The request object. Request to create a new build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/builds', + 'body': 'build', + }, +{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/builds', + 'body': 'build', + }, + ] + request, metadata = self._interceptor.pre_create_build(request, metadata) + pb_request = cloudbuild.CreateBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_build(resp) + return resp + + class _CreateBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("CreateBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.CreateBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.BuildTrigger: + r"""Call the create build trigger method over HTTP. + + Args: + request (~.cloudbuild.CreateBuildTriggerRequest): + The request object. Request to create a new ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/triggers', + 'body': 'trigger', + }, +{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_create_build_trigger(request, metadata) + pb_request = cloudbuild.CreateBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.BuildTrigger() + pb_resp = cloudbuild.BuildTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_build_trigger(resp) + return resp + + class _CreateWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("CreateWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "workerPoolId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.CreateWorkerPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create worker pool method over HTTP. + + Args: + request (~.cloudbuild.CreateWorkerPoolRequest): + The request object. Request to create a new ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/workerPools', + 'body': 'worker_pool', + }, + ] + request, metadata = self._interceptor.pre_create_worker_pool(request, metadata) + pb_request = cloudbuild.CreateWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_worker_pool(resp) + return resp + + class _DeleteBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("DeleteBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.DeleteBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete build trigger method over HTTP. + + Args: + request (~.cloudbuild.DeleteBuildTriggerRequest): + The request object. Request to delete a ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', + }, +{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_build_trigger(request, metadata) + pb_request = cloudbuild.DeleteBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("DeleteWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.DeleteWorkerPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete worker pool method over HTTP. 
+ + Args: + request (~.cloudbuild.DeleteWorkerPoolRequest): + The request object. Request to delete a ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/workerPools/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_worker_pool(request, metadata) + pb_request = cloudbuild.DeleteWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_worker_pool(resp) + return resp + + class _GetBuild(CloudBuildRestStub): + def __hash__(self): + return hash("GetBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.GetBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.Build: + r"""Call the get build method over HTTP. + + Args: + request (~.cloudbuild.GetBuildRequest): + The request object. Request to get a build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.Build: + A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find + source code, how to build it (for example, the builder + image to run on the source), and where to store the + built artifacts. + + Fields can include the following variables, which will + be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified by + RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. 
+ - $REVISION_ID or $COMMIT_SHA: the commit SHA specified + by RepoSource or resolved from the specified branch + or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/projects/{project_id}/builds/{id}', + }, +{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/builds/*}', + }, + ] + request, metadata = self._interceptor.pre_get_build(request, metadata) + pb_request = cloudbuild.GetBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.Build() + pb_resp = cloudbuild.Build.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_build(resp) + return resp + + class _GetBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("GetBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.GetBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.BuildTrigger: + r"""Call the get build trigger method over HTTP. + + Args: + request (~.cloudbuild.GetBuildTriggerRequest): + The request object. Returns the ``BuildTrigger`` with the specified ID. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', + }, +{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_build_trigger(request, metadata) + pb_request = cloudbuild.GetBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.BuildTrigger() + pb_resp = cloudbuild.BuildTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_build_trigger(resp) + return resp + + class _GetWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("GetWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.GetWorkerPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.WorkerPool: + r"""Call the get worker pool method over HTTP. + + Args: + request (~.cloudbuild.GetWorkerPoolRequest): + The request object. Request to get a ``WorkerPool`` with the specified name. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.WorkerPool: + Configuration for a ``WorkerPool``. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a ``WorkerPool`` to run your + builds. Private ``WorkerPool``\ s give your builds + access to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see `Private + pools + overview `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/workerPools/*}', + }, + ] + request, metadata = self._interceptor.pre_get_worker_pool(request, metadata) + pb_request = cloudbuild.GetWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.WorkerPool() + pb_resp = cloudbuild.WorkerPool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_worker_pool(resp) + return resp + + class _ListBuilds(CloudBuildRestStub): + def __hash__(self): + return hash("ListBuilds") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.ListBuildsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.ListBuildsResponse: + r"""Call the list builds method over HTTP. + + Args: + request (~.cloudbuild.ListBuildsRequest): + The request object. Request to list builds. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListBuildsResponse: + Response including listed builds. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/projects/{project_id}/builds', + }, +{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/builds', + }, + ] + request, metadata = self._interceptor.pre_list_builds(request, metadata) + pb_request = cloudbuild.ListBuildsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListBuildsResponse() + pb_resp = cloudbuild.ListBuildsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_builds(resp) + return resp + + class _ListBuildTriggers(CloudBuildRestStub): + def __hash__(self): + return hash("ListBuildTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.ListBuildTriggersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.ListBuildTriggersResponse: + r"""Call the list build triggers method over HTTP. + + Args: + request (~.cloudbuild.ListBuildTriggersRequest): + The request object. Request to list existing ``BuildTriggers``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListBuildTriggersResponse: + Response containing existing ``BuildTriggers``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/projects/{project_id}/triggers', + }, +{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + }, + ] + request, metadata = self._interceptor.pre_list_build_triggers(request, metadata) + pb_request = cloudbuild.ListBuildTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListBuildTriggersResponse() + pb_resp = cloudbuild.ListBuildTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_build_triggers(resp) + return resp + + class _ListWorkerPools(CloudBuildRestStub): + def __hash__(self): + return hash("ListWorkerPools") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.ListWorkerPoolsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.ListWorkerPoolsResponse: + r"""Call the list worker pools method over HTTP. + + Args: + request (~.cloudbuild.ListWorkerPoolsRequest): + The request object. Request to list ``WorkerPool``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListWorkerPoolsResponse: + Response containing existing ``WorkerPools``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/workerPools', + }, + ] + request, metadata = self._interceptor.pre_list_worker_pools(request, metadata) + pb_request = cloudbuild.ListWorkerPoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListWorkerPoolsResponse() + pb_resp = cloudbuild.ListWorkerPoolsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_worker_pools(resp) + return resp + + class _ReceiveTriggerWebhook(CloudBuildRestStub): + def __hash__(self): + return hash("ReceiveTriggerWebhook") + + def __call__(self, + request: cloudbuild.ReceiveTriggerWebhookRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.ReceiveTriggerWebhookResponse: + r"""Call the receive trigger webhook method over HTTP. 
+ + Args: + request (~.cloudbuild.ReceiveTriggerWebhookRequest): + The request object. ReceiveTriggerWebhookRequest [Experimental] is the + request object accepted by the ReceiveTriggerWebhook + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ReceiveTriggerWebhookResponse: + ReceiveTriggerWebhookResponse [Experimental] is the + response object for the ReceiveTriggerWebhook method. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/triggers/{trigger}:webhook', + 'body': 'body', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}:webhook', + 'body': 'body', + }, + ] + request, metadata = self._interceptor.pre_receive_trigger_webhook(request, metadata) + pb_request = cloudbuild.ReceiveTriggerWebhookRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ReceiveTriggerWebhookResponse() + pb_resp = cloudbuild.ReceiveTriggerWebhookResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_receive_trigger_webhook(resp) + return resp + + class _RetryBuild(CloudBuildRestStub): + def __hash__(self): + return hash("RetryBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.RetryBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the retry build method over HTTP. + + Args: + request (~.cloudbuild.RetryBuildRequest): + The request object. Specifies a build to retry. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/builds/{id}:retry', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/builds/*}:retry', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_retry_build(request, metadata) + pb_request = cloudbuild.RetryBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retry_build(resp) + return resp + + class _RunBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("RunBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.RunBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the run build trigger method over HTTP. + + Args: + request (~.cloudbuild.RunBuildTriggerRequest): + The request object. Specifies a build trigger to run and + the source to use. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}:run', + 'body': 'source', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}:run', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_run_build_trigger(request, metadata) + pb_request = cloudbuild.RunBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_build_trigger(resp) + return resp + + class _UpdateBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("UpdateBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.UpdateBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.BuildTrigger: + r"""Call the update build trigger method over HTTP. + + Args: + request (~.cloudbuild.UpdateBuildTriggerRequest): + The request object. Request to update an existing ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', + 'body': 'trigger', + }, +{ + 'method': 'patch', + 'uri': '/v1/{trigger.resource_name=projects/*/locations/*/triggers/*}', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_update_build_trigger(request, metadata) + pb_request = cloudbuild.UpdateBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.BuildTrigger() + pb_resp = cloudbuild.BuildTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_build_trigger(resp) + return resp + + class _UpdateWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("UpdateWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.UpdateWorkerPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update worker pool method over HTTP. + + Args: + request (~.cloudbuild.UpdateWorkerPoolRequest): + The request object. Request to update a ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}', + 'body': 'worker_pool', + }, + ] + request, metadata = self._interceptor.pre_update_worker_pool(request, metadata) + pb_request = cloudbuild.UpdateWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_worker_pool(resp) + return resp + + @property + def approve_build(self) -> Callable[ + [cloudbuild.ApproveBuildRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ApproveBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_build(self) -> Callable[ + [cloudbuild.CancelBuildRequest], + cloudbuild.Build]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_build(self) -> Callable[ + [cloudbuild.CreateBuildRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_build_trigger(self) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], + cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_worker_pool(self) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_build_trigger(self) -> Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_worker_pool(self) -> Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_build(self) -> Callable[ + [cloudbuild.GetBuildRequest], + cloudbuild.Build]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_build_trigger(self) -> Callable[ + [cloudbuild.GetBuildTriggerRequest], + cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_worker_pool(self) -> Callable[ + [cloudbuild.GetWorkerPoolRequest], + cloudbuild.WorkerPool]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_builds(self) -> Callable[ + [cloudbuild.ListBuildsRequest], + cloudbuild.ListBuildsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBuilds(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_build_triggers(self) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + cloudbuild.ListBuildTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBuildTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_worker_pools(self) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], + cloudbuild.ListWorkerPoolsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkerPools(self._session, self._host, self._interceptor) # type: ignore + + @property + def receive_trigger_webhook(self) -> Callable[ + [cloudbuild.ReceiveTriggerWebhookRequest], + cloudbuild.ReceiveTriggerWebhookResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReceiveTriggerWebhook(self._session, self._host, self._interceptor) # type: ignore + + @property + def retry_build(self) -> Callable[ + [cloudbuild.RetryBuildRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetryBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_build_trigger(self) -> Callable[ + [cloudbuild.RunBuildTriggerRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_build_trigger(self) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_worker_pool(self) -> Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'CloudBuildRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py new file mode 100644 index 00000000..fab30741 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cloudbuild import ( + ApprovalConfig, + ApprovalResult, + ApproveBuildRequest, + ArtifactResult, + Artifacts, + Build, + BuildApproval, + BuildOperationMetadata, + BuildOptions, + BuildStep, + BuildTrigger, + BuiltImage, + CancelBuildRequest, + CreateBuildRequest, + CreateBuildTriggerRequest, + CreateWorkerPoolOperationMetadata, + CreateWorkerPoolRequest, + DeleteBuildTriggerRequest, + DeleteWorkerPoolOperationMetadata, + DeleteWorkerPoolRequest, + FileHashes, + GetBuildRequest, + GetBuildTriggerRequest, + GetWorkerPoolRequest, + GitHubEventsConfig, + GitSource, + Hash, + InlineSecret, + ListBuildsRequest, + ListBuildsResponse, + ListBuildTriggersRequest, + ListBuildTriggersResponse, + ListWorkerPoolsRequest, + ListWorkerPoolsResponse, + PrivatePoolV1Config, + PubsubConfig, + PullRequestFilter, + PushFilter, + ReceiveTriggerWebhookRequest, + ReceiveTriggerWebhookResponse, + RepositoryEventConfig, + RepoSource, + Results, + RetryBuildRequest, + RunBuildTriggerRequest, + Secret, + SecretManagerSecret, + Secrets, + Source, + SourceProvenance, + StorageSource, + StorageSourceManifest, + TimeSpan, + UpdateBuildTriggerRequest, + UpdateWorkerPoolOperationMetadata, + UpdateWorkerPoolRequest, + UploadedMavenArtifact, + UploadedNpmPackage, + UploadedPythonPackage, + Volume, + WebhookConfig, + WorkerPool, +) + +__all__ = ( + 'ApprovalConfig', + 'ApprovalResult', + 'ApproveBuildRequest', + 'ArtifactResult', + 'Artifacts', + 'Build', + 'BuildApproval', + 'BuildOperationMetadata', + 'BuildOptions', + 'BuildStep', + 'BuildTrigger', + 'BuiltImage', + 'CancelBuildRequest', + 'CreateBuildRequest', + 'CreateBuildTriggerRequest', + 'CreateWorkerPoolOperationMetadata', + 'CreateWorkerPoolRequest', + 'DeleteBuildTriggerRequest', + 'DeleteWorkerPoolOperationMetadata', + 'DeleteWorkerPoolRequest', + 'FileHashes', + 'GetBuildRequest', + 'GetBuildTriggerRequest', + 'GetWorkerPoolRequest', + 'GitHubEventsConfig', + 'GitSource', + 'Hash', + 'InlineSecret', + 'ListBuildsRequest', + 
'ListBuildsResponse', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', + 'PrivatePoolV1Config', + 'PubsubConfig', + 'PullRequestFilter', + 'PushFilter', + 'ReceiveTriggerWebhookRequest', + 'ReceiveTriggerWebhookResponse', + 'RepositoryEventConfig', + 'RepoSource', + 'Results', + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'Secret', + 'SecretManagerSecret', + 'Secrets', + 'Source', + 'SourceProvenance', + 'StorageSource', + 'StorageSourceManifest', + 'TimeSpan', + 'UpdateBuildTriggerRequest', + 'UpdateWorkerPoolOperationMetadata', + 'UpdateWorkerPoolRequest', + 'UploadedMavenArtifact', + 'UploadedNpmPackage', + 'UploadedPythonPackage', + 'Volume', + 'WebhookConfig', + 'WorkerPool', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py new file mode 100644 index 00000000..838474d2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -0,0 +1,3680 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.devtools.cloudbuild.v1', + manifest={ + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'StorageSource', + 'GitSource', + 'RepoSource', + 'StorageSourceManifest', + 'Source', + 'BuiltImage', + 'UploadedPythonPackage', + 'UploadedMavenArtifact', + 'UploadedNpmPackage', + 'BuildStep', + 'Volume', + 'Results', + 'ArtifactResult', + 'Build', + 'Artifacts', + 'TimeSpan', + 'BuildOperationMetadata', + 'SourceProvenance', + 'FileHashes', + 'Hash', + 'Secrets', + 'InlineSecret', + 'SecretManagerSecret', + 'Secret', + 'CreateBuildRequest', + 'GetBuildRequest', + 'ListBuildsRequest', + 'ListBuildsResponse', + 'CancelBuildRequest', + 'ApproveBuildRequest', + 'BuildApproval', + 'ApprovalConfig', + 'ApprovalResult', + 'BuildTrigger', + 'RepositoryEventConfig', + 'GitHubEventsConfig', + 'PubsubConfig', + 'WebhookConfig', + 'PullRequestFilter', + 'PushFilter', + 'CreateBuildTriggerRequest', + 'GetBuildTriggerRequest', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'DeleteBuildTriggerRequest', + 'UpdateBuildTriggerRequest', + 'BuildOptions', + 'ReceiveTriggerWebhookRequest', + 'ReceiveTriggerWebhookResponse', + 'WorkerPool', + 'PrivatePoolV1Config', + 'CreateWorkerPoolRequest', + 'GetWorkerPoolRequest', + 'DeleteWorkerPoolRequest', + 'UpdateWorkerPoolRequest', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', + 'CreateWorkerPoolOperationMetadata', + 'UpdateWorkerPoolOperationMetadata', + 'DeleteWorkerPoolOperationMetadata', + }, +) + + +class RetryBuildRequest(proto.Message): + r"""Specifies a build to retry. 
+
+    Attributes:
+        name (str):
+            The name of the ``Build`` to retry. Format:
+            ``projects/{project}/locations/{location}/builds/{build}``
+        project_id (str):
+            Required. ID of the project.
+        id (str):
+            Required. Build ID of the original build.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class RunBuildTriggerRequest(proto.Message):
+    r"""Specifies a build trigger to run and the source to use.
+
+    Attributes:
+        name (str):
+            The name of the ``Trigger`` to run. Format:
+            ``projects/{project}/locations/{location}/triggers/{trigger}``
+        project_id (str):
+            Required. ID of the project.
+        trigger_id (str):
+            Required. ID of the trigger.
+        source (google.cloud.devtools.cloudbuild_v1.types.RepoSource):
+            Source to build against this trigger.
+            Branch and tag names cannot consist of regular
+            expressions.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    trigger_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    source: 'RepoSource' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='RepoSource',
+    )
+
+
+class StorageSource(proto.Message):
+    r"""Location of the source in an archive file in Cloud Storage.
+
+    Attributes:
+        bucket (str):
+            Cloud Storage bucket containing the source (see `Bucket Name
+            Requirements <https://cloud.google.com/storage/docs/bucket-naming#requirements>`__).
+        object_ (str):
+            Cloud Storage object containing the source.
+
+            This object must be a zipped (``.zip``) or gzipped archive
+            file (``.tar.gz``) containing source to build.
+        generation (int):
+            Cloud Storage generation for the object. If
+            the generation is omitted, the latest generation
+            will be used.
+ """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + object_: str = proto.Field( + proto.STRING, + number=2, + ) + generation: int = proto.Field( + proto.INT64, + number=3, + ) + + +class GitSource(proto.Message): + r"""Location of the source in any accessible Git repository. + + Attributes: + url (str): + Location of the Git repo to build. + + This will be used as a ``git remote``, see + https://git-scm.com/docs/git-remote. + dir_ (str): + Directory, relative to the source root, in which to run the + build. + + This must be a relative path. If a step's ``dir`` is + specified and is an absolute path, this value is ignored for + that step's execution. + revision (str): + The revision to fetch from the Git repository such as a + branch, a tag, a commit SHA, or any Git ref. + + Cloud Build uses ``git fetch`` to fetch the revision from + the Git repository; therefore make sure that the string you + provide for ``revision`` is parsable by the command. For + information on string values accepted by ``git fetch``, see + https://git-scm.com/docs/gitrevisions#_specifying_revisions. + For information on ``git fetch``, see + https://git-scm.com/docs/git-fetch. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + dir_: str = proto.Field( + proto.STRING, + number=5, + ) + revision: str = proto.Field( + proto.STRING, + number=6, + ) + + +class RepoSource(proto.Message): + r"""Location of the source in a Google Cloud Source Repository. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project_id (str): + ID of the project that owns the Cloud Source + Repository. If omitted, the project ID + requesting the build is assumed. 
+        repo_name (str):
+            Name of the Cloud Source Repository.
+        branch_name (str):
+            Regex matching branches to build.
+            The syntax of the regular expressions accepted
+            is the syntax accepted by RE2 and described at
+            https://github.com/google/re2/wiki/Syntax
+
+            This field is a member of `oneof`_ ``revision``.
+        tag_name (str):
+            Regex matching tags to build.
+            The syntax of the regular expressions accepted
+            is the syntax accepted by RE2 and described at
+            https://github.com/google/re2/wiki/Syntax
+
+            This field is a member of `oneof`_ ``revision``.
+        commit_sha (str):
+            Explicit commit SHA to build.
+
+            This field is a member of `oneof`_ ``revision``.
+        dir_ (str):
+            Directory, relative to the source root, in which to run the
+            build.
+
+            This must be a relative path. If a step's ``dir`` is
+            specified and is an absolute path, this value is ignored for
+            that step's execution.
+        invert_regex (bool):
+            Only trigger a build if the revision regex
+            does NOT match the revision.
+        substitutions (MutableMapping[str, str]):
+            Substitutions to use in a triggered build.
+            Should only be used with RunBuildTrigger
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    repo_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    branch_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+        oneof='revision',
+    )
+    tag_name: str = proto.Field(
+        proto.STRING,
+        number=4,
+        oneof='revision',
+    )
+    commit_sha: str = proto.Field(
+        proto.STRING,
+        number=5,
+        oneof='revision',
+    )
+    dir_: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    invert_regex: bool = proto.Field(
+        proto.BOOL,
+        number=8,
+    )
+    substitutions: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=9,
+    )
+
+
+class StorageSourceManifest(proto.Message):
+    r"""Location of the source manifest in Cloud Storage. This feature is in
+    Preview; see description
+    `here <https://github.com/GoogleCloudPlatform/cloud-builders/tree/master/gcs-fetcher>`__.
+
+    Attributes:
+        bucket (str):
+            Cloud Storage bucket containing the source manifest (see
+            `Bucket Name
+            Requirements <https://cloud.google.com/storage/docs/bucket-naming#requirements>`__).
+        object_ (str):
+            Cloud Storage object containing the source
+            manifest.
+            This object must be a JSON file.
+        generation (int):
+            Cloud Storage generation for the object. If
+            the generation is omitted, the latest generation
+            will be used.
+    """
+
+    bucket: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    object_: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    generation: int = proto.Field(
+        proto.INT64,
+        number=3,
+    )
+
+
+class Source(proto.Message):
+    r"""Location of the source in a supported storage service.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource):
+            If provided, get the source from this
+            location in Cloud Storage.
+
+            This field is a member of `oneof`_ ``source``.
+        repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource):
+            If provided, get the source from this
+            location in a Cloud Source Repository.
+
+            This field is a member of `oneof`_ ``source``.
+        git_source (google.cloud.devtools.cloudbuild_v1.types.GitSource):
+            If provided, get the source from this Git
+            repository.
+
+            This field is a member of `oneof`_ ``source``.
+        storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest):
+            If provided, get the source from this manifest in Cloud
+            Storage. This feature is in Preview; see description
+            `here <https://github.com/GoogleCloudPlatform/cloud-builders/tree/master/gcs-fetcher>`__.
+
+            This field is a member of `oneof`_ ``source``.
+ """ + + storage_source: 'StorageSource' = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message='StorageSource', + ) + repo_source: 'RepoSource' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='RepoSource', + ) + git_source: 'GitSource' = proto.Field( + proto.MESSAGE, + number=5, + oneof='source', + message='GitSource', + ) + storage_source_manifest: 'StorageSourceManifest' = proto.Field( + proto.MESSAGE, + number=8, + oneof='source', + message='StorageSourceManifest', + ) + + +class BuiltImage(proto.Message): + r"""An image built by the pipeline. + + Attributes: + name (str): + Name used to push the container image to Google Container + Registry, as presented to ``docker push``. + digest (str): + Docker Registry 2.0 digest. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified image. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + digest: str = proto.Field( + proto.STRING, + number=3, + ) + push_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=4, + message='TimeSpan', + ) + + +class UploadedPythonPackage(proto.Message): + r"""Artifact uploaded using the PythonPackage directive. + + Attributes: + uri (str): + URI of the uploaded artifact. + file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): + Hash types and values of the Python Artifact. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified artifact. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_hashes: 'FileHashes' = proto.Field( + proto.MESSAGE, + number=2, + message='FileHashes', + ) + push_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=3, + message='TimeSpan', + ) + + +class UploadedMavenArtifact(proto.Message): + r"""A Maven artifact uploaded using the MavenArtifact directive. 
+ + Attributes: + uri (str): + URI of the uploaded artifact. + file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): + Hash types and values of the Maven Artifact. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified artifact. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_hashes: 'FileHashes' = proto.Field( + proto.MESSAGE, + number=2, + message='FileHashes', + ) + push_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=3, + message='TimeSpan', + ) + + +class UploadedNpmPackage(proto.Message): + r"""An npm package uploaded to Artifact Registry using the + NpmPackage directive. + + Attributes: + uri (str): + URI of the uploaded npm package. + file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): + Hash types and values of the npm package. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified artifact. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_hashes: 'FileHashes' = proto.Field( + proto.MESSAGE, + number=2, + message='FileHashes', + ) + push_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=3, + message='TimeSpan', + ) + + +class BuildStep(proto.Message): + r"""A step in the build pipeline. + + Attributes: + name (str): + Required. The name of the container image that will run this + particular build step. + + If the image is available in the host's Docker daemon's + cache, it will be run directly. If not, the host will + attempt to pull the image first, using the builder service + account's credentials if necessary. + + The Docker daemon's cache will already have the latest + versions of all of the officially supported build steps + (https://github.com/GoogleCloudPlatform/cloud-builders). 
The + Docker daemon will also have cached many of the layers for + some popular images, like "ubuntu", "debian", but they will + be refreshed at the time you attempt to use them. + + If you built an image in a previous build step, it will be + stored in the host's Docker daemon's cache and is available + to use as the name for a later build step. + env (MutableSequence[str]): + A list of environment variable definitions to + be used when running a step. + The elements are of the form "KEY=VALUE" for the + environment variable "KEY" being given the value + "VALUE". + args (MutableSequence[str]): + A list of arguments that will be presented to the step when + it is started. + + If the image used to run the step's container has an + entrypoint, the ``args`` are used as arguments to that + entrypoint. If the image does not define an entrypoint, the + first element in args is used as the entrypoint, and the + remainder will be used as arguments. + dir_ (str): + Working directory to use when running this step's container. + + If this value is a relative path, it is relative to the + build's working directory. If this value is absolute, it may + be outside the build's working directory, in which case the + contents of the path may not be persisted across build step + executions, unless a ``volume`` for that path is specified. + + If the build specifies a ``RepoSource`` with ``dir`` and a + step with a ``dir``, which specifies an absolute path, the + ``RepoSource`` ``dir`` is ignored for the step's execution. + id (str): + Unique identifier for this build step, used in ``wait_for`` + to reference this build step as a dependency. + wait_for (MutableSequence[str]): + The ID(s) of the step(s) that this build step depends on. + This build step will not start until all the build steps in + ``wait_for`` have completed successfully. If ``wait_for`` is + empty, this build step will start when all previous build + steps in the ``Build.Steps`` list have completed + successfully. 
+ entrypoint (str): + Entrypoint to be used instead of the build + step image's default entrypoint. If unset, the + image's default entrypoint is used. + secret_env (MutableSequence[str]): + A list of environment variables which are encrypted using a + Cloud Key Management Service crypto key. These values must + be specified in the build's ``Secret``. + volumes (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Volume]): + List of volumes to mount into the build step. + Each volume is created as an empty volume prior + to execution of the build step. Upon completion + of the build, volumes and their contents are + discarded. + + Using a named volume in only one step is not + valid as it is indicative of a build request + with an incorrect configuration. + timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + executing this build step. + pull_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pulling this build step's builder image only. + timeout (google.protobuf.duration_pb2.Duration): + Time limit for executing this build step. If + not defined, the step has no time limit and will + be allowed to continue to run until either it + completes or the build itself times out. + status (google.cloud.devtools.cloudbuild_v1.types.Build.Status): + Output only. Status of the build step. At + this time, build step status is only updated on + build completion; step status is not updated in + real-time as the build progresses. + allow_failure (bool): + Allow this build step to fail without failing the entire + build. + + If false, the entire build will fail if this step fails. + Otherwise, the build will succeed, but this step will still + have a failure status. Error information will be reported in + the failure_detail field. + exit_code (int): + Output only. Return code from running the + step. 
+ allow_exit_codes (MutableSequence[int]): + Allow this build step to fail without failing the entire + build if and only if the exit code is one of the specified + codes. If allow_failure is also specified, this field will + take precedence. + script (str): + A shell script to be executed in the step. + When script is provided, the user cannot specify + the entrypoint or args. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + env: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + dir_: str = proto.Field( + proto.STRING, + number=4, + ) + id: str = proto.Field( + proto.STRING, + number=5, + ) + wait_for: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + entrypoint: str = proto.Field( + proto.STRING, + number=7, + ) + secret_env: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + volumes: MutableSequence['Volume'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='Volume', + ) + timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=10, + message='TimeSpan', + ) + pull_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=13, + message='TimeSpan', + ) + timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=11, + message=duration_pb2.Duration, + ) + status: 'Build.Status' = proto.Field( + proto.ENUM, + number=12, + enum='Build.Status', + ) + allow_failure: bool = proto.Field( + proto.BOOL, + number=14, + ) + exit_code: int = proto.Field( + proto.INT32, + number=16, + ) + allow_exit_codes: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=18, + ) + script: str = proto.Field( + proto.STRING, + number=19, + ) + + +class Volume(proto.Message): + r"""Volume describes a Docker container volume which is mounted + into build steps in order to persist files across build step + execution. 
+ + Attributes: + name (str): + Name of the volume to mount. + Volume names must be unique per build step and + must be valid names for Docker volumes. Each + named volume must be used by at least two build + steps. + path (str): + Path at which to mount the volume. + Paths must be absolute and cannot conflict with + other volume paths on the same build step or + with certain reserved volume paths. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Results(proto.Message): + r"""Artifacts created by the build pipeline. + + Attributes: + images (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuiltImage]): + Container images that were built as a part of + the build. + build_step_images (MutableSequence[str]): + List of build step digests, in the order + corresponding to build step indices. + artifact_manifest (str): + Path to the artifact manifest for + non-container artifacts uploaded to Cloud + Storage. Only populated when artifacts are + uploaded to Cloud Storage. + num_artifacts (int): + Number of non-container artifacts uploaded to + Cloud Storage. Only populated when artifacts are + uploaded to Cloud Storage. + build_step_outputs (MutableSequence[bytes]): + List of build step outputs, produced by builder images, in + the order corresponding to build step indices. + + `Cloud + Builders `__ + can produce this output by writing to + ``$BUILDER_OUTPUT/output``. Only the first 4KB of data is + stored. + artifact_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Time to push all non-container artifacts to + Cloud Storage. + python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedPythonPackage]): + Python artifacts uploaded to Artifact + Registry at the end of the build. 
+ maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedMavenArtifact]): + Maven artifacts uploaded to Artifact Registry + at the end of the build. + npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedNpmPackage]): + Npm packages uploaded to Artifact Registry at + the end of the build. + """ + + images: MutableSequence['BuiltImage'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='BuiltImage', + ) + build_step_images: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + artifact_manifest: str = proto.Field( + proto.STRING, + number=4, + ) + num_artifacts: int = proto.Field( + proto.INT64, + number=5, + ) + build_step_outputs: MutableSequence[bytes] = proto.RepeatedField( + proto.BYTES, + number=6, + ) + artifact_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=7, + message='TimeSpan', + ) + python_packages: MutableSequence['UploadedPythonPackage'] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message='UploadedPythonPackage', + ) + maven_artifacts: MutableSequence['UploadedMavenArtifact'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='UploadedMavenArtifact', + ) + npm_packages: MutableSequence['UploadedNpmPackage'] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message='UploadedNpmPackage', + ) + + +class ArtifactResult(proto.Message): + r"""An artifact that was uploaded during a build. This + is a single record in the artifact manifest JSON file. + + Attributes: + location (str): + The path of an artifact in a Cloud Storage bucket, with the + generation number. For example, + ``gs://mybucket/path/to/output.jar#generation``. + file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.FileHashes]): + The file hash of the artifact. 
+ """ + + location: str = proto.Field( + proto.STRING, + number=1, + ) + file_hash: MutableSequence['FileHashes'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FileHashes', + ) + + +class Build(proto.Message): + r"""A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find source code, + how to build it (for example, the builder image to run on the + source), and where to store the built artifacts. + + Fields can include the following variables, which will be expanded + when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified by RepoSource. + - $BRANCH_NAME: the branch name specified by RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA specified by + RepoSource or resolved from the specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. + + Attributes: + name (str): + Output only. The 'Build' name with format: + ``projects/{project}/locations/{location}/builds/{build}``, + where {build} is a unique identifier generated by the + service. + id (str): + Output only. Unique identifier of the build. + project_id (str): + Output only. ID of the project. + status (google.cloud.devtools.cloudbuild_v1.types.Build.Status): + Output only. Status of the build. + status_detail (str): + Output only. Customer-readable message about + the current status. + source (google.cloud.devtools.cloudbuild_v1.types.Source): + The location of the source files to build. + steps (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildStep]): + Required. The operations to be performed on + the workspace. + results (google.cloud.devtools.cloudbuild_v1.types.Results): + Output only. Results of the build. 
+ create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the request to + create the build was received. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which execution of the + build was started. + finish_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which execution of the build was + finished. + + The difference between finish_time and start_time is the + duration of the build's execution. + timeout (google.protobuf.duration_pb2.Duration): + Amount of time that this build should be allowed to run, to + second granularity. If this amount of time elapses, work on + the build will cease and the build status will be + ``TIMEOUT``. + + ``timeout`` starts ticking from ``startTime``. + + Default time is 60 minutes. + images (MutableSequence[str]): + A list of images to be pushed upon the successful completion + of all build steps. + + The images are pushed using the builder service account's + credentials. + + The digests of the pushed images will be stored in the + ``Build`` resource's results field. + + If any of the images fail to be pushed, the build status is + marked ``FAILURE``. + queue_ttl (google.protobuf.duration_pb2.Duration): + TTL in queue for this build. If provided and the build is + enqueued longer than this value, the build will expire and + the build status will be ``EXPIRED``. + + The TTL starts ticking from create_time. + artifacts (google.cloud.devtools.cloudbuild_v1.types.Artifacts): + Artifacts produced by the build that should + be uploaded upon successful completion of all + build steps. + logs_bucket (str): + Cloud Storage bucket where logs should be written (see + `Bucket Name + Requirements `__). + Logs file names will be of the format + ``${logs_bucket}/log-${build_id}.txt``. + source_provenance (google.cloud.devtools.cloudbuild_v1.types.SourceProvenance): + Output only. A permanent fixed identifier for + source. + build_trigger_id (str): + Output only. 
The ID of the ``BuildTrigger`` that triggered + this build, if it was triggered automatically. + options (google.cloud.devtools.cloudbuild_v1.types.BuildOptions): + Special options for this build. + log_url (str): + Output only. URL to logs for this build in + Google Cloud Console. + substitutions (MutableMapping[str, str]): + Substitutions data for ``Build`` resource. + tags (MutableSequence[str]): + Tags for annotation of a ``Build``. These are not docker + tags. + secrets (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Secret]): + Secrets to decrypt using Cloud Key Management Service. Note: + Secret Manager is the recommended technique for managing + sensitive data with Cloud Build. Use ``available_secrets`` + to configure builds to access secrets from Secret Manager. + For instructions, see: + https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets + timing (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.TimeSpan]): + Output only. Stores timing information for phases of the + build. Valid keys are: + + - BUILD: time to execute all build steps. + - PUSH: time to push all artifacts including docker images + and non docker artifacts. + - FETCHSOURCE: time to fetch source. + - SETUPBUILD: time to set up build. + + If the build does not specify source or images, these keys + will not be included. + approval (google.cloud.devtools.cloudbuild_v1.types.BuildApproval): + Output only. Describes this build's approval + configuration, status, and result. + service_account (str): + IAM service account whose credentials will be used at build + runtime. Must be of the format + ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}``. ACCOUNT + can be email address or uniqueId of the service account. + available_secrets (google.cloud.devtools.cloudbuild_v1.types.Secrets): + Secrets and secret environment variables. + warnings (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Build.Warning]): + Output only. 
Non-fatal problems encountered + during the execution of the build. + failure_info (google.cloud.devtools.cloudbuild_v1.types.Build.FailureInfo): + Output only. Contains information about the + build when status=FAILURE. + """ + class Status(proto.Enum): + r"""Possible status of a build or build step. + + Values: + STATUS_UNKNOWN (0): + Status of the build is unknown. + PENDING (10): + Build has been created and is pending + execution and queuing. It has not been queued. + QUEUED (1): + Build or step is queued; work has not yet + begun. + WORKING (2): + Build or step is being executed. + SUCCESS (3): + Build or step finished successfully. + FAILURE (4): + Build or step failed to complete + successfully. + INTERNAL_ERROR (5): + Build or step failed due to an internal + cause. + TIMEOUT (6): + Build or step took longer than was allowed. + CANCELLED (7): + Build or step was canceled by a user. + EXPIRED (9): + Build was enqueued for longer than the value of + ``queue_ttl``. + """ + STATUS_UNKNOWN = 0 + PENDING = 10 + QUEUED = 1 + WORKING = 2 + SUCCESS = 3 + FAILURE = 4 + INTERNAL_ERROR = 5 + TIMEOUT = 6 + CANCELLED = 7 + EXPIRED = 9 + + class Warning(proto.Message): + r"""A non-fatal problem encountered during the execution of the + build. + + Attributes: + text (str): + Explanation of the warning generated. + priority (google.cloud.devtools.cloudbuild_v1.types.Build.Warning.Priority): + The priority for this warning. + """ + class Priority(proto.Enum): + r"""The relative importance of this warning. + + Values: + PRIORITY_UNSPECIFIED (0): + Should not be used. + INFO (1): + e.g. deprecation warnings and alternative + feature highlights. + WARNING (2): + e.g. automated detection of possible issues + with the build. + ALERT (3): + e.g. 
alerts that a feature used in the build + is pending removal + """ + PRIORITY_UNSPECIFIED = 0 + INFO = 1 + WARNING = 2 + ALERT = 3 + + text: str = proto.Field( + proto.STRING, + number=1, + ) + priority: 'Build.Warning.Priority' = proto.Field( + proto.ENUM, + number=2, + enum='Build.Warning.Priority', + ) + + class FailureInfo(proto.Message): + r"""A fatal problem encountered during the execution of the + build. + + Attributes: + type_ (google.cloud.devtools.cloudbuild_v1.types.Build.FailureInfo.FailureType): + The name of the failure. + detail (str): + Explains the failure issue in more detail + using hard-coded text. + """ + class FailureType(proto.Enum): + r"""The name of a fatal problem encountered during the execution + of the build. + + Values: + FAILURE_TYPE_UNSPECIFIED (0): + Type unspecified + PUSH_FAILED (1): + Unable to push the image to the repository. + PUSH_IMAGE_NOT_FOUND (2): + Final image not found. + PUSH_NOT_AUTHORIZED (3): + Unauthorized push of the final image. + LOGGING_FAILURE (4): + Backend logging failures. Should retry. + USER_BUILD_STEP (5): + A build step has failed. + FETCH_SOURCE_FAILED (6): + The source fetching has failed. 
+ """ + FAILURE_TYPE_UNSPECIFIED = 0 + PUSH_FAILED = 1 + PUSH_IMAGE_NOT_FOUND = 2 + PUSH_NOT_AUTHORIZED = 3 + LOGGING_FAILURE = 4 + USER_BUILD_STEP = 5 + FETCH_SOURCE_FAILED = 6 + + type_: 'Build.FailureInfo.FailureType' = proto.Field( + proto.ENUM, + number=1, + enum='Build.FailureInfo.FailureType', + ) + detail: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=45, + ) + id: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=16, + ) + status: Status = proto.Field( + proto.ENUM, + number=2, + enum=Status, + ) + status_detail: str = proto.Field( + proto.STRING, + number=24, + ) + source: 'Source' = proto.Field( + proto.MESSAGE, + number=3, + message='Source', + ) + steps: MutableSequence['BuildStep'] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message='BuildStep', + ) + results: 'Results' = proto.Field( + proto.MESSAGE, + number=10, + message='Results', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + finish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=12, + message=duration_pb2.Duration, + ) + images: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + queue_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=40, + message=duration_pb2.Duration, + ) + artifacts: 'Artifacts' = proto.Field( + proto.MESSAGE, + number=37, + message='Artifacts', + ) + logs_bucket: str = proto.Field( + proto.STRING, + number=19, + ) + source_provenance: 'SourceProvenance' = proto.Field( + proto.MESSAGE, + number=21, + message='SourceProvenance', + ) + build_trigger_id: str = proto.Field( 
+ proto.STRING, + number=22, + ) + options: 'BuildOptions' = proto.Field( + proto.MESSAGE, + number=23, + message='BuildOptions', + ) + log_url: str = proto.Field( + proto.STRING, + number=25, + ) + substitutions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=29, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=31, + ) + secrets: MutableSequence['Secret'] = proto.RepeatedField( + proto.MESSAGE, + number=32, + message='Secret', + ) + timing: MutableMapping[str, 'TimeSpan'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=33, + message='TimeSpan', + ) + approval: 'BuildApproval' = proto.Field( + proto.MESSAGE, + number=44, + message='BuildApproval', + ) + service_account: str = proto.Field( + proto.STRING, + number=42, + ) + available_secrets: 'Secrets' = proto.Field( + proto.MESSAGE, + number=47, + message='Secrets', + ) + warnings: MutableSequence[Warning] = proto.RepeatedField( + proto.MESSAGE, + number=49, + message=Warning, + ) + failure_info: FailureInfo = proto.Field( + proto.MESSAGE, + number=51, + message=FailureInfo, + ) + + +class Artifacts(proto.Message): + r"""Artifacts produced by a build that should be uploaded upon + successful completion of all build steps. + + Attributes: + images (MutableSequence[str]): + A list of images to be pushed upon the + successful completion of all build steps. + + The images will be pushed using the builder + service account's credentials. + The digests of the pushed images will be stored + in the Build resource's results field. + + If any of the images fail to be pushed, the + build is marked FAILURE. + objects (google.cloud.devtools.cloudbuild_v1.types.Artifacts.ArtifactObjects): + A list of objects to be uploaded to Cloud + Storage upon successful completion of all build + steps. 
+ Files in the workspace matching specified paths + globs will be uploaded to the specified Cloud + Storage location using the builder service + account's credentials. + + The location and generation of the uploaded + objects will be stored in the Build resource's + results field. + + If any objects fail to be pushed, the build is + marked FAILURE. + maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.MavenArtifact]): + A list of Maven artifacts to be uploaded to + Artifact Registry upon successful completion of + all build steps. + Artifacts in the workspace matching specified + paths globs will be uploaded to the specified + Artifact Registry repository using the builder + service account's credentials. + + If any artifacts fail to be pushed, the build is + marked FAILURE. + python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.PythonPackage]): + A list of Python packages to be uploaded to + Artifact Registry upon successful completion of + all build steps. + The build service account credentials will be + used to perform the upload. + If any objects fail to be pushed, the build is + marked FAILURE. + npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.NpmPackage]): + A list of npm packages to be uploaded to + Artifact Registry upon successful completion of + all build steps. + Npm packages in the specified paths will be + uploaded to the specified Artifact Registry + repository using the builder service account's + credentials. + + If any packages fail to be pushed, the build is + marked FAILURE. + """ + + class ArtifactObjects(proto.Message): + r"""Files in the workspace to upload to Cloud Storage upon + successful completion of all build steps. + + Attributes: + location (str): + Cloud Storage bucket and optional object path, in the form + "gs://bucket/path/to/somewhere/". (see `Bucket Name + Requirements `__). 
+ + Files in the workspace matching any path pattern will be + uploaded to Cloud Storage with this location as a prefix. + paths (MutableSequence[str]): + Path globs used to match files in the build's + workspace. + timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing all artifact objects. + """ + + location: str = proto.Field( + proto.STRING, + number=1, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=3, + message='TimeSpan', + ) + + class MavenArtifact(proto.Message): + r"""A Maven artifact to upload to Artifact Registry upon + successful completion of all build steps. + + Attributes: + repository (str): + Artifact Registry repository, in the form + "https://$REGION-maven.pkg.dev/$PROJECT/$REPOSITORY" + Artifact in the workspace specified by path will + be uploaded to Artifact Registry with this + location as a prefix. + path (str): + Path to an artifact in the build's workspace + to be uploaded to Artifact Registry. + This can be either an absolute path, + e.g. + /workspace/my-app/target/my-app-1.0.SNAPSHOT.jar + or a relative path from /workspace, + e.g. my-app/target/my-app-1.0.SNAPSHOT.jar. + artifact_id (str): + Maven ``artifactId`` value used when uploading the artifact + to Artifact Registry. + group_id (str): + Maven ``groupId`` value used when uploading the artifact to + Artifact Registry. + version (str): + Maven ``version`` value used when uploading the artifact to + Artifact Registry. 
+ """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + artifact_id: str = proto.Field( + proto.STRING, + number=3, + ) + group_id: str = proto.Field( + proto.STRING, + number=4, + ) + version: str = proto.Field( + proto.STRING, + number=5, + ) + + class PythonPackage(proto.Message): + r"""Python package to upload to Artifact Registry upon successful + completion of all build steps. A package can encapsulate + multiple objects to be uploaded to a single repository. + + Attributes: + repository (str): + Artifact Registry repository, in the form + "https://$REGION-python.pkg.dev/$PROJECT/$REPOSITORY" + Files in the workspace matching any path pattern + will be uploaded to Artifact Registry with this + location as a prefix. + paths (MutableSequence[str]): + Path globs used to match files in the build's workspace. For + Python/ Twine, this is usually ``dist/*``, and sometimes + additionally an ``.asc`` file. + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class NpmPackage(proto.Message): + r"""Npm package to upload to Artifact Registry upon successful + completion of all build steps. + + Attributes: + repository (str): + Artifact Registry repository, in the form + "https://$REGION-npm.pkg.dev/$PROJECT/$REPOSITORY" + Npm package in the workspace specified by path + will be zipped and uploaded to Artifact Registry + with this location as a prefix. + package_path (str): + Path to the package.json. + e.g. 
workspace/path/to/package + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + package_path: str = proto.Field( + proto.STRING, + number=2, + ) + + images: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + objects: ArtifactObjects = proto.Field( + proto.MESSAGE, + number=2, + message=ArtifactObjects, + ) + maven_artifacts: MutableSequence[MavenArtifact] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=MavenArtifact, + ) + python_packages: MutableSequence[PythonPackage] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=PythonPackage, + ) + npm_packages: MutableSequence[NpmPackage] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=NpmPackage, + ) + + +class TimeSpan(proto.Message): + r"""Start and end times for a build execution phase. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start of time span. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End of time span. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BuildOperationMetadata(proto.Message): + r"""Metadata for build operations. + + Attributes: + build (google.cloud.devtools.cloudbuild_v1.types.Build): + The build that the operation is tracking. + """ + + build: 'Build' = proto.Field( + proto.MESSAGE, + number=1, + message='Build', + ) + + +class SourceProvenance(proto.Message): + r"""Provenance of the source. Ways to find the original source, + or verify that some source was used for this build. + + Attributes: + resolved_storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource): + A copy of the build's ``source.storage_source``, if exists, + with any generations resolved. 
+ resolved_repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): + A copy of the build's ``source.repo_source``, if exists, + with any revisions resolved. + resolved_storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest): + A copy of the build's ``source.storage_source_manifest``, if + exists, with any revisions resolved. This feature is in + Preview. + file_hashes (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.FileHashes]): + Output only. Hash(es) of the build source, which can be used + to verify that the original source integrity was maintained + in the build. Note that ``FileHashes`` will only be + populated if ``BuildOptions`` has requested a + ``SourceProvenanceHash``. + + The keys to this map are file paths used as build source and + the values contain the hash values for those files. + + If the build source came in a single package such as a + gzipped tarfile (``.tar.gz``), the ``FileHash`` will be for + the single path to that file. + """ + + resolved_storage_source: 'StorageSource' = proto.Field( + proto.MESSAGE, + number=3, + message='StorageSource', + ) + resolved_repo_source: 'RepoSource' = proto.Field( + proto.MESSAGE, + number=6, + message='RepoSource', + ) + resolved_storage_source_manifest: 'StorageSourceManifest' = proto.Field( + proto.MESSAGE, + number=9, + message='StorageSourceManifest', + ) + file_hashes: MutableMapping[str, 'FileHashes'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message='FileHashes', + ) + + +class FileHashes(proto.Message): + r"""Container message for hashes of byte content of files, used + in SourceProvenance messages to verify integrity of source input + to the build. + + Attributes: + file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Hash]): + Collection of file hashes. 
+ """ + + file_hash: MutableSequence['Hash'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Hash', + ) + + +class Hash(proto.Message): + r"""Container message for hash values. + + Attributes: + type_ (google.cloud.devtools.cloudbuild_v1.types.Hash.HashType): + The type of hash that was performed. + value (bytes): + The hash value. + """ + class HashType(proto.Enum): + r"""Specifies the hash algorithm, if any. + + Values: + NONE (0): + No hash requested. + SHA256 (1): + Use a sha256 hash. + MD5 (2): + Use a md5 hash. + SHA512 (4): + Use a sha512 hash. + """ + NONE = 0 + SHA256 = 1 + MD5 = 2 + SHA512 = 4 + + type_: HashType = proto.Field( + proto.ENUM, + number=1, + enum=HashType, + ) + value: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class Secrets(proto.Message): + r"""Secrets and secret environment variables. + + Attributes: + secret_manager (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.SecretManagerSecret]): + Secrets in Secret Manager and associated + secret environment variable. + inline (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.InlineSecret]): + Secrets encrypted with KMS key and the + associated secret environment variable. + """ + + secret_manager: MutableSequence['SecretManagerSecret'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SecretManagerSecret', + ) + inline: MutableSequence['InlineSecret'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='InlineSecret', + ) + + +class InlineSecret(proto.Message): + r"""Pairs a set of secret environment variables mapped to + encrypted values with the Cloud KMS key to use to decrypt the + value. + + Attributes: + kms_key_name (str): + Resource name of Cloud KMS crypto key to decrypt the + encrypted value. In format: + projects/\ */locations/*/keyRings/*/cryptoKeys/* + env_map (MutableMapping[str, bytes]): + Map of environment variable name to its + encrypted value. 
+ Secret environment variables must be unique + across all of a build's secrets, and must be + used by at least one build step. Values can be + at most 64 KB in size. There can be at most 100 + secret values across all of a build's secrets. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + env_map: MutableMapping[str, bytes] = proto.MapField( + proto.STRING, + proto.BYTES, + number=2, + ) + + +class SecretManagerSecret(proto.Message): + r"""Pairs a secret environment variable with a SecretVersion in + Secret Manager. + + Attributes: + version_name (str): + Resource name of the SecretVersion. In format: + projects/\ */secrets/*/versions/\* + env (str): + Environment variable name to associate with + the secret. Secret environment variables must be + unique across all of a build's secrets, and must + be used by at least one build step. + """ + + version_name: str = proto.Field( + proto.STRING, + number=1, + ) + env: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Secret(proto.Message): + r"""Pairs a set of secret environment variables containing encrypted + values with the Cloud KMS key to use to decrypt the value. Note: Use + ``kmsKeyName`` with ``available_secrets`` instead of using + ``kmsKeyName`` with ``secret``. For instructions see: + https://cloud.google.com/cloud-build/docs/securing-builds/use-encrypted-credentials. + + Attributes: + kms_key_name (str): + Cloud KMS key name to use to decrypt these + envs. + secret_env (MutableMapping[str, bytes]): + Map of environment variable name to its + encrypted value. + Secret environment variables must be unique + across all of a build's secrets, and must be + used by at least one build step. Values can be + at most 64 KB in size. There can be at most 100 + secret values across all of a build's secrets. 
+ """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + secret_env: MutableMapping[str, bytes] = proto.MapField( + proto.STRING, + proto.BYTES, + number=3, + ) + + +class CreateBuildRequest(proto.Message): + r"""Request to create a new build. + + Attributes: + parent (str): + The parent resource where this build will be created. + Format: ``projects/{project}/locations/{location}`` + project_id (str): + Required. ID of the project. + build (google.cloud.devtools.cloudbuild_v1.types.Build): + Required. Build resource to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + build: 'Build' = proto.Field( + proto.MESSAGE, + number=2, + message='Build', + ) + + +class GetBuildRequest(proto.Message): + r"""Request to get a build. + + Attributes: + name (str): + The name of the ``Build`` to retrieve. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + project_id (str): + Required. ID of the project. + id (str): + Required. ID of the build. + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListBuildsRequest(proto.Message): + r"""Request to list builds. + + Attributes: + parent (str): + The parent of the collection of ``Builds``. Format: + ``projects/{project}/locations/{location}`` + project_id (str): + Required. ID of the project. + page_size (int): + Number of results to return in the list. + page_token (str): + The page token for the next page of Builds. + + If unspecified, the first page of results is returned. + + If the token is rejected for any reason, INVALID_ARGUMENT + will be thrown. In this case, the token should be discarded, + and pagination should be restarted from the first page of + results. + + See https://google.aip.dev/158 for more. 
+ filter (str): + The raw filter text to constrain the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=9, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=8, + ) + + +class ListBuildsResponse(proto.Message): + r"""Response including listed builds. + + Attributes: + builds (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Build]): + Builds will be sorted by ``create_time``, descending. + next_page_token (str): + Token to receive the next page of results. + This will be absent if the end of the response + list has been reached. + """ + + @property + def raw_page(self): + return self + + builds: MutableSequence['Build'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Build', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelBuildRequest(proto.Message): + r"""Request to cancel an ongoing build. + + Attributes: + name (str): + The name of the ``Build`` to cancel. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + project_id (str): + Required. ID of the project. + id (str): + Required. ID of the build. + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ApproveBuildRequest(proto.Message): + r"""Request to approve or reject a pending build. + + Attributes: + name (str): + Required. Name of the target build. For example: + "projects/{$project_id}/builds/{$build_id}". + approval_result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): + Approval decision and metadata. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + approval_result: 'ApprovalResult' = proto.Field( + proto.MESSAGE, + number=2, + message='ApprovalResult', + ) + + +class BuildApproval(proto.Message): + r"""BuildApproval describes a build's approval configuration, + state, and result. + + Attributes: + state (google.cloud.devtools.cloudbuild_v1.types.BuildApproval.State): + Output only. The state of this build's + approval. + config (google.cloud.devtools.cloudbuild_v1.types.ApprovalConfig): + Output only. Configuration for manual + approval of this build. + result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): + Output only. Result of manual approval for + this Build. + """ + class State(proto.Enum): + r"""Specifies the current state of a build's approval. + + Values: + STATE_UNSPECIFIED (0): + Default enum type. This should not be used. + PENDING (1): + Build approval is pending. + APPROVED (2): + Build approval has been approved. + REJECTED (3): + Build approval has been rejected. + CANCELLED (5): + Build was cancelled while it was still + pending approval. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + APPROVED = 2 + REJECTED = 3 + CANCELLED = 5 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + config: 'ApprovalConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='ApprovalConfig', + ) + result: 'ApprovalResult' = proto.Field( + proto.MESSAGE, + number=3, + message='ApprovalResult', + ) + + +class ApprovalConfig(proto.Message): + r"""ApprovalConfig describes configuration for manual approval of + a build. + + Attributes: + approval_required (bool): + Whether or not approval is needed. If this is + set on a build, it will become pending when + created, and will need to be explicitly approved + to start. 
+ """ + + approval_required: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ApprovalResult(proto.Message): + r"""ApprovalResult describes the decision and associated metadata + of a manual approval of a build. + + Attributes: + approver_account (str): + Output only. Email of the user that called + the ApproveBuild API to approve or reject a + build at the time that the API was called. + approval_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the approval + decision was made. + decision (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult.Decision): + Required. The decision of this manual + approval. + comment (str): + Optional. An optional comment for this manual + approval result. + url (str): + Optional. An optional URL tied to this manual + approval result. This field is essentially the + same as comment, except that it will be rendered + by the UI differently. An example use case is a + link to an external job that approved this + Build. + """ + class Decision(proto.Enum): + r"""Specifies whether or not this manual approval result is to + approve or reject a build. + + Values: + DECISION_UNSPECIFIED (0): + Default enum type. This should not be used. + APPROVED (1): + Build is approved. + REJECTED (2): + Build is rejected. + """ + DECISION_UNSPECIFIED = 0 + APPROVED = 1 + REJECTED = 2 + + approver_account: str = proto.Field( + proto.STRING, + number=2, + ) + approval_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + decision: Decision = proto.Field( + proto.ENUM, + number=4, + enum=Decision, + ) + comment: str = proto.Field( + proto.STRING, + number=5, + ) + url: str = proto.Field( + proto.STRING, + number=6, + ) + + +class BuildTrigger(proto.Message): + r"""Configuration for an automated build in response to source + repository changes. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + resource_name (str): + The ``Trigger`` name with format: + ``projects/{project}/locations/{location}/triggers/{trigger}``, + where {trigger} is a unique identifier generated by the + service. + id (str): + Output only. Unique identifier of the + trigger. + description (str): + Human-readable description of this trigger. + name (str): + User-assigned name of the trigger. Must be + unique within the project. Trigger names must + meet the following requirements: + + They must contain only alphanumeric characters + and dashes. + They can be 1-64 characters long. + + They must begin and end with an alphanumeric + character. + tags (MutableSequence[str]): + Tags for annotation of a ``BuildTrigger`` + trigger_template (google.cloud.devtools.cloudbuild_v1.types.RepoSource): + Template describing the types of source changes to trigger a + build. + + Branch and tag names in trigger templates are interpreted as + regular expressions. Any branch or tag change that matches + that regular expression will trigger a build. + + Mutually exclusive with ``github``. + github (google.cloud.devtools.cloudbuild_v1.types.GitHubEventsConfig): + GitHubEventsConfig describes the configuration of a trigger + that creates a build whenever a GitHub event is received. + + Mutually exclusive with ``trigger_template``. + pubsub_config (google.cloud.devtools.cloudbuild_v1.types.PubsubConfig): + PubsubConfig describes the configuration of a + trigger that creates a build whenever a Pub/Sub + message is published. + webhook_config (google.cloud.devtools.cloudbuild_v1.types.WebhookConfig): + WebhookConfig describes the configuration of + a trigger that creates a build whenever a + webhook is sent to a trigger's webhook URL. 
+ autodetect (bool): + Autodetect build configuration. The + following precedence is used (case insensitive): + 1. cloudbuild.yaml + 2. cloudbuild.yml + 3. cloudbuild.json + 4. Dockerfile + + Currently only available for GitHub App + Triggers. + + This field is a member of `oneof`_ ``build_template``. + build (google.cloud.devtools.cloudbuild_v1.types.Build): + Contents of the build template. + + This field is a member of `oneof`_ ``build_template``. + filename (str): + Path, from the source root, to the build + configuration file (i.e. cloudbuild.yaml). + + This field is a member of `oneof`_ ``build_template``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the trigger was + created. + disabled (bool): + If true, the trigger will never automatically + execute a build. + substitutions (MutableMapping[str, str]): + Substitutions for Build resource. The keys must match the + following regular expression: ``^_[A-Z0-9_]+$``. + ignored_files (MutableSequence[str]): + ignored_files and included_files are file glob matches using + https://golang.org/pkg/path/filepath/#Match extended with + support for "**". + + If ignored_files and changed files are both empty, then they + are not used to determine whether or not to trigger a build. + + If ignored_files is not empty, then we ignore any files that + match any of the ignored_file globs. If the change has no + files that are outside of the ignored_files globs, then we + do not trigger a build. + included_files (MutableSequence[str]): + If any of the files altered in the commit pass the + ignored_files filter and included_files is empty, then as + far as this filter is concerned, we should trigger the + build. + + If any of the files altered in the commit pass the + ignored_files filter and included_files is not empty, then + we make sure that at least one of those files matches a + included_files glob. If not, then we do not trigger a build. + filter (str): + Optional. 
A Common Expression Language + string. + service_account (str): + The service account used for all user-controlled operations + including UpdateBuildTrigger, RunBuildTrigger, CreateBuild, + and CancelBuild. If no service account is set, then the + standard Cloud Build service account + ([PROJECT_NUM]@system.gserviceaccount.com) will be used + instead. Format: + ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}`` + repository_event_config (google.cloud.devtools.cloudbuild_v1.types.RepositoryEventConfig): + The configuration of a trigger that creates a + build whenever an event from Repo API is + received. + """ + + resource_name: str = proto.Field( + proto.STRING, + number=34, + ) + id: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=10, + ) + name: str = proto.Field( + proto.STRING, + number=21, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=19, + ) + trigger_template: 'RepoSource' = proto.Field( + proto.MESSAGE, + number=7, + message='RepoSource', + ) + github: 'GitHubEventsConfig' = proto.Field( + proto.MESSAGE, + number=13, + message='GitHubEventsConfig', + ) + pubsub_config: 'PubsubConfig' = proto.Field( + proto.MESSAGE, + number=29, + message='PubsubConfig', + ) + webhook_config: 'WebhookConfig' = proto.Field( + proto.MESSAGE, + number=31, + message='WebhookConfig', + ) + autodetect: bool = proto.Field( + proto.BOOL, + number=18, + oneof='build_template', + ) + build: 'Build' = proto.Field( + proto.MESSAGE, + number=4, + oneof='build_template', + message='Build', + ) + filename: str = proto.Field( + proto.STRING, + number=8, + oneof='build_template', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=9, + ) + substitutions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=11, + ) + 
ignored_files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + included_files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=16, + ) + filter: str = proto.Field( + proto.STRING, + number=30, + ) + service_account: str = proto.Field( + proto.STRING, + number=33, + ) + repository_event_config: 'RepositoryEventConfig' = proto.Field( + proto.MESSAGE, + number=39, + message='RepositoryEventConfig', + ) + + +class RepositoryEventConfig(proto.Message): + r"""The configuration of a trigger that creates a build whenever + an event from Repo API is received. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + repository (str): + The resource name of the Repo API resource. + repository_type (google.cloud.devtools.cloudbuild_v1.types.RepositoryEventConfig.RepositoryType): + Output only. The type of the SCM vendor the + repository points to. + pull_request (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter): + Filter to match changes in pull requests. + + This field is a member of `oneof`_ ``filter``. + push (google.cloud.devtools.cloudbuild_v1.types.PushFilter): + Filter to match changes in refs like + branches, tags. + + This field is a member of `oneof`_ ``filter``. + """ + class RepositoryType(proto.Enum): + r"""All possible SCM repo types from Repo API. + + Values: + REPOSITORY_TYPE_UNSPECIFIED (0): + If unspecified, RepositoryType defaults to + GITHUB. + GITHUB (1): + The SCM repo is GITHUB. + GITHUB_ENTERPRISE (2): + The SCM repo is GITHUB Enterprise. + GITLAB_ENTERPRISE (3): + The SCM repo is GITLAB Enterprise. 
+ """ + REPOSITORY_TYPE_UNSPECIFIED = 0 + GITHUB = 1 + GITHUB_ENTERPRISE = 2 + GITLAB_ENTERPRISE = 3 + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + repository_type: RepositoryType = proto.Field( + proto.ENUM, + number=2, + enum=RepositoryType, + ) + pull_request: 'PullRequestFilter' = proto.Field( + proto.MESSAGE, + number=3, + oneof='filter', + message='PullRequestFilter', + ) + push: 'PushFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='filter', + message='PushFilter', + ) + + +class GitHubEventsConfig(proto.Message): + r"""GitHubEventsConfig describes the configuration of a trigger + that creates a build whenever a GitHub event is received. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + installation_id (int): + The installationID that emits the GitHub + event. + owner (str): + Owner of the repository. For example: The + owner for + https://github.com/googlecloudplatform/cloud-builders + is "googlecloudplatform". + name (str): + Name of the repository. For example: The name + for + https://github.com/googlecloudplatform/cloud-builders + is "cloud-builders". + pull_request (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter): + filter to match changes in pull requests. + + This field is a member of `oneof`_ ``event``. + push (google.cloud.devtools.cloudbuild_v1.types.PushFilter): + filter to match changes in refs like + branches, tags. + + This field is a member of `oneof`_ ``event``. 
+ """ + + installation_id: int = proto.Field( + proto.INT64, + number=1, + ) + owner: str = proto.Field( + proto.STRING, + number=6, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + pull_request: 'PullRequestFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='event', + message='PullRequestFilter', + ) + push: 'PushFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='event', + message='PushFilter', + ) + + +class PubsubConfig(proto.Message): + r"""PubsubConfig describes the configuration of a trigger that + creates a build whenever a Pub/Sub message is published. + + Attributes: + subscription (str): + Output only. Name of the subscription. Format is + ``projects/{project}/subscriptions/{subscription}``. + topic (str): + The name of the topic from which this subscription is + receiving messages. Format is + ``projects/{project}/topics/{topic}``. + service_account_email (str): + Service account that will make the push + request. + state (google.cloud.devtools.cloudbuild_v1.types.PubsubConfig.State): + Potential issues with the underlying Pub/Sub + subscription configuration. Only populated on + get requests. + """ + class State(proto.Enum): + r"""Enumerates potential issues with the underlying Pub/Sub + subscription configuration. + + Values: + STATE_UNSPECIFIED (0): + The subscription configuration has not been + checked. + OK (1): + The Pub/Sub subscription is properly + configured. + SUBSCRIPTION_DELETED (2): + The subscription has been deleted. + TOPIC_DELETED (3): + The topic has been deleted. + SUBSCRIPTION_MISCONFIGURED (4): + Some of the subscription's field are + misconfigured. 
+ """ + STATE_UNSPECIFIED = 0 + OK = 1 + SUBSCRIPTION_DELETED = 2 + TOPIC_DELETED = 3 + SUBSCRIPTION_MISCONFIGURED = 4 + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + topic: str = proto.Field( + proto.STRING, + number=2, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + + +class WebhookConfig(proto.Message): + r"""WebhookConfig describes the configuration of a trigger that + creates a build whenever a webhook is sent to a trigger's + webhook URL. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + secret (str): + Required. Resource name for the secret + required as a URL parameter. + + This field is a member of `oneof`_ ``auth_method``. + state (google.cloud.devtools.cloudbuild_v1.types.WebhookConfig.State): + Potential issues with the underlying Pub/Sub + subscription configuration. Only populated on + get requests. + """ + class State(proto.Enum): + r"""Enumerates potential issues with the Secret Manager secret + provided by the user. + + Values: + STATE_UNSPECIFIED (0): + The webhook auth configuration not been + checked. + OK (1): + The auth configuration is properly setup. + SECRET_DELETED (2): + The secret provided in auth_method has been deleted. + """ + STATE_UNSPECIFIED = 0 + OK = 1 + SECRET_DELETED = 2 + + secret: str = proto.Field( + proto.STRING, + number=3, + oneof='auth_method', + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + + +class PullRequestFilter(proto.Message): + r"""PullRequestFilter contains filter properties for matching + GitHub Pull Requests. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + branch (str): + Regex of branches to match. 
+ The syntax of the regular expressions accepted + is the syntax accepted by RE2 and described at + https://github.com/google/re2/wiki/Syntax + + This field is a member of `oneof`_ ``git_ref``. + comment_control (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter.CommentControl): + Configure builds to run whether a repository owner or + collaborator need to comment ``/gcbrun``. + invert_regex (bool): + If true, branches that do NOT match the git_ref will trigger + a build. + """ + class CommentControl(proto.Enum): + r"""Controls behavior of Pull Request comments. + + Values: + COMMENTS_DISABLED (0): + Do not require comments on Pull Requests + before builds are triggered. + COMMENTS_ENABLED (1): + Enforce that repository owners or + collaborators must comment on Pull Requests + before builds are triggered. + COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY (2): + Enforce that repository owners or + collaborators must comment on external + contributors' Pull Requests before builds are + triggered. + """ + COMMENTS_DISABLED = 0 + COMMENTS_ENABLED = 1 + COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY = 2 + + branch: str = proto.Field( + proto.STRING, + number=2, + oneof='git_ref', + ) + comment_control: CommentControl = proto.Field( + proto.ENUM, + number=5, + enum=CommentControl, + ) + invert_regex: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class PushFilter(proto.Message): + r"""Push contains filter properties for matching GitHub git + pushes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + branch (str): + Regexes matching branches to build. 
+ The syntax of the regular expressions accepted + is the syntax accepted by RE2 and described at + https://github.com/google/re2/wiki/Syntax + + This field is a member of `oneof`_ ``git_ref``. + tag (str): + Regexes matching tags to build. + The syntax of the regular expressions accepted + is the syntax accepted by RE2 and described at + https://github.com/google/re2/wiki/Syntax + + This field is a member of `oneof`_ ``git_ref``. + invert_regex (bool): + When true, only trigger a build if the revision regex does + NOT match the git_ref regex. + """ + + branch: str = proto.Field( + proto.STRING, + number=2, + oneof='git_ref', + ) + tag: str = proto.Field( + proto.STRING, + number=3, + oneof='git_ref', + ) + invert_regex: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class CreateBuildTriggerRequest(proto.Message): + r"""Request to create a new ``BuildTrigger``. + + Attributes: + parent (str): + The parent resource where this trigger will be created. + Format: ``projects/{project}/locations/{location}`` + project_id (str): + Required. ID of the project for which to + configure automatic builds. + trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): + Required. ``BuildTrigger`` to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + trigger: 'BuildTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='BuildTrigger', + ) + + +class GetBuildTriggerRequest(proto.Message): + r"""Returns the ``BuildTrigger`` with the specified ID. + + Attributes: + name (str): + The name of the ``Trigger`` to retrieve. Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + project_id (str): + Required. ID of the project that owns the + trigger. + trigger_id (str): + Required. Identifier (``id`` or ``name``) of the + ``BuildTrigger`` to get. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + trigger_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListBuildTriggersRequest(proto.Message): + r"""Request to list existing ``BuildTriggers``. + + Attributes: + parent (str): + The parent of the collection of ``Triggers``. Format: + ``projects/{project}/locations/{location}`` + project_id (str): + Required. ID of the project for which to list + BuildTriggers. + page_size (int): + Number of results to return in the list. + page_token (str): + Token to provide to skip to a particular spot + in the list. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListBuildTriggersResponse(proto.Message): + r"""Response containing existing ``BuildTriggers``. + + Attributes: + triggers (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildTrigger]): + ``BuildTriggers`` for the project, sorted by ``create_time`` + descending. + next_page_token (str): + Token to receive the next page of results. + """ + + @property + def raw_page(self): + return self + + triggers: MutableSequence['BuildTrigger'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BuildTrigger', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteBuildTriggerRequest(proto.Message): + r"""Request to delete a ``BuildTrigger``. + + Attributes: + name (str): + The name of the ``Trigger`` to delete. Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + project_id (str): + Required. ID of the project that owns the + trigger. + trigger_id (str): + Required. ID of the ``BuildTrigger`` to delete. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + trigger_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateBuildTriggerRequest(proto.Message): + r"""Request to update an existing ``BuildTrigger``. + + Attributes: + project_id (str): + Required. ID of the project that owns the + trigger. + trigger_id (str): + Required. ID of the ``BuildTrigger`` to update. + trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): + Required. ``BuildTrigger`` to update. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + trigger_id: str = proto.Field( + proto.STRING, + number=2, + ) + trigger: 'BuildTrigger' = proto.Field( + proto.MESSAGE, + number=3, + message='BuildTrigger', + ) + + +class BuildOptions(proto.Message): + r"""Optional arguments to enable specific features of builds. + + Attributes: + source_provenance_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Hash.HashType]): + Requested hash for SourceProvenance. + requested_verify_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.VerifyOption): + Requested verifiability options. + machine_type (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.MachineType): + Compute Engine machine type on which to run + the build. + disk_size_gb (int): + Requested disk size for the VM that runs the build. Note + that this is *NOT* "disk free"; some of the space will be + used by the operating system and build utilities. Also note + that this is the minimum disk size that will be allocated + for the build -- the build may run with a larger disk than + requested. At present, the maximum disk size is 2000GB; + builds that request more than the maximum are rejected with + an error. + substitution_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.SubstitutionOption): + Option to specify behavior when there is an error in the + substitution checks. 
+ + NOTE: this is always set to ALLOW_LOOSE for triggered builds + and cannot be overridden in the build configuration file. + dynamic_substitutions (bool): + Option to specify whether or not to apply + bash style string operations to the + substitutions. + NOTE: this is always enabled for triggered + builds and cannot be overridden in the build + configuration file. + log_streaming_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LogStreamingOption): + Option to define build log streaming behavior + to Cloud Storage. + worker_pool (str): + This field deprecated; please use ``pool.name`` instead. + pool (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.PoolOption): + Optional. Specification for execution on a ``WorkerPool``. + + See `running builds in a private + pool `__ + for more information. + logging (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LoggingMode): + Option to specify the logging mode, which + determines if and where build logs are stored. + env (MutableSequence[str]): + A list of global environment variable + definitions that will exist for all build steps + in this build. If a variable is defined in both + globally and in a build step, the variable will + use the build step value. + The elements are of the form "KEY=VALUE" for the + environment variable "KEY" being given the value + "VALUE". + secret_env (MutableSequence[str]): + A list of global environment variables, which are encrypted + using a Cloud Key Management Service crypto key. These + values must be specified in the build's ``Secret``. These + variables will be available to all build steps in this + build. + volumes (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Volume]): + Global list of volumes to mount for ALL build + steps + Each volume is created as an empty volume prior + to starting the build process. Upon completion + of the build, volumes and their contents are + discarded. 
Global volume names and paths cannot + conflict with the volumes defined a build step. + + Using a global volume in a build with only one + step is not valid as it is indicative of a build + request with an incorrect configuration. + default_logs_bucket_behavior (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.DefaultLogsBucketBehavior): + Optional. Option to specify how default logs + buckets are setup. + """ + class VerifyOption(proto.Enum): + r"""Specifies the manner in which the build should be verified, if at + all. + + If a verified build is requested, and any part of the process to + generate and upload provenance fails, the build will also fail. + + If the build does not request verification then that process may + occur, but is not guaranteed to. If it does occur and fails, the + build will not fail. + + For more information, see `Viewing Build + Provenance `__. + + Values: + NOT_VERIFIED (0): + Not a verifiable build (the default). + VERIFIED (1): + Build must be verified. + """ + NOT_VERIFIED = 0 + VERIFIED = 1 + + class MachineType(proto.Enum): + r"""Supported Compute Engine machine types. For more information, see + `Machine + types `__. + + Values: + UNSPECIFIED (0): + Standard machine type. + N1_HIGHCPU_8 (1): + Highcpu machine with 8 CPUs. + N1_HIGHCPU_32 (2): + Highcpu machine with 32 CPUs. + E2_HIGHCPU_8 (5): + Highcpu e2 machine with 8 CPUs. + E2_HIGHCPU_32 (6): + Highcpu e2 machine with 32 CPUs. + E2_MEDIUM (7): + E2 machine with 1 CPU. + """ + UNSPECIFIED = 0 + N1_HIGHCPU_8 = 1 + N1_HIGHCPU_32 = 2 + E2_HIGHCPU_8 = 5 + E2_HIGHCPU_32 = 6 + E2_MEDIUM = 7 + + class SubstitutionOption(proto.Enum): + r"""Specifies the behavior when there is an error in the + substitution checks. + + Values: + MUST_MATCH (0): + Fails the build if error in substitutions + checks, like missing a substitution in the + template or in the map. + ALLOW_LOOSE (1): + Do not fail the build if error in + substitutions checks. 
+ """ + MUST_MATCH = 0 + ALLOW_LOOSE = 1 + + class LogStreamingOption(proto.Enum): + r"""Specifies the behavior when writing build logs to Cloud + Storage. + + Values: + STREAM_DEFAULT (0): + Service may automatically determine build log + streaming behavior. + STREAM_ON (1): + Build logs should be streamed to Cloud + Storage. + STREAM_OFF (2): + Build logs should not be streamed to Cloud + Storage; they will be written when the build is + completed. + """ + STREAM_DEFAULT = 0 + STREAM_ON = 1 + STREAM_OFF = 2 + + class LoggingMode(proto.Enum): + r"""Specifies the logging mode. + + Values: + LOGGING_UNSPECIFIED (0): + The service determines the logging mode. The default is + ``LEGACY``. Do not rely on the default logging behavior as + it may change in the future. + LEGACY (1): + Build logs are stored in Cloud Logging and + Cloud Storage. + GCS_ONLY (2): + Build logs are stored in Cloud Storage. + STACKDRIVER_ONLY (3): + This option is the same as CLOUD_LOGGING_ONLY. + CLOUD_LOGGING_ONLY (5): + Build logs are stored in Cloud Logging. Selecting this + option will not allow `logs + streaming `__. + NONE (4): + Turn off all logging. No build logs will be + captured. + """ + LOGGING_UNSPECIFIED = 0 + LEGACY = 1 + GCS_ONLY = 2 + STACKDRIVER_ONLY = 3 + CLOUD_LOGGING_ONLY = 5 + NONE = 4 + + class DefaultLogsBucketBehavior(proto.Enum): + r"""Default GCS log bucket behavior options. + + Values: + DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED (0): + Unspecified. + REGIONAL_USER_OWNED_BUCKET (1): + Bucket is located in user-owned project in + the same region as the build. The builder + service account must have access to create and + write to GCS buckets in the build project. + """ + DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED = 0 + REGIONAL_USER_OWNED_BUCKET = 1 + + class PoolOption(proto.Message): + r"""Details about how a build should be executed on a ``WorkerPool``. + + See `running builds in a private + pool `__ + for more information. 
+ + Attributes: + name (str): + The ``WorkerPool`` resource to execute the build on. You + must have ``cloudbuild.workerpools.use`` on the project + hosting the WorkerPool. + + Format + projects/{project}/locations/{location}/workerPools/{workerPoolId} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + source_provenance_hash: MutableSequence['Hash.HashType'] = proto.RepeatedField( + proto.ENUM, + number=1, + enum='Hash.HashType', + ) + requested_verify_option: VerifyOption = proto.Field( + proto.ENUM, + number=2, + enum=VerifyOption, + ) + machine_type: MachineType = proto.Field( + proto.ENUM, + number=3, + enum=MachineType, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=6, + ) + substitution_option: SubstitutionOption = proto.Field( + proto.ENUM, + number=4, + enum=SubstitutionOption, + ) + dynamic_substitutions: bool = proto.Field( + proto.BOOL, + number=17, + ) + log_streaming_option: LogStreamingOption = proto.Field( + proto.ENUM, + number=5, + enum=LogStreamingOption, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=7, + ) + pool: PoolOption = proto.Field( + proto.MESSAGE, + number=19, + message=PoolOption, + ) + logging: LoggingMode = proto.Field( + proto.ENUM, + number=11, + enum=LoggingMode, + ) + env: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) + secret_env: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + volumes: MutableSequence['Volume'] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message='Volume', + ) + default_logs_bucket_behavior: DefaultLogsBucketBehavior = proto.Field( + proto.ENUM, + number=21, + enum=DefaultLogsBucketBehavior, + ) + + +class ReceiveTriggerWebhookRequest(proto.Message): + r"""ReceiveTriggerWebhookRequest [Experimental] is the request object + accepted by the ReceiveTriggerWebhook method. + + Attributes: + name (str): + The name of the ``ReceiveTriggerWebhook`` to retrieve. 
+ Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + body (google.api.httpbody_pb2.HttpBody): + HTTP request body. + project_id (str): + Project in which the specified trigger lives + trigger (str): + Name of the trigger to run the payload + against + secret (str): + Secret token used for authorization if an + OAuth token isn't provided. + """ + + name: str = proto.Field( + proto.STRING, + number=5, + ) + body: httpbody_pb2.HttpBody = proto.Field( + proto.MESSAGE, + number=1, + message=httpbody_pb2.HttpBody, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + trigger: str = proto.Field( + proto.STRING, + number=3, + ) + secret: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ReceiveTriggerWebhookResponse(proto.Message): + r"""ReceiveTriggerWebhookResponse [Experimental] is the response object + for the ReceiveTriggerWebhook method. + + """ + + +class WorkerPool(proto.Message): + r"""Configuration for a ``WorkerPool``. + + Cloud Build owns and maintains a pool of workers for general use and + have no access to a project's private network. By default, builds + submitted to Cloud Build will use a worker from this pool. + + If your build needs access to resources on a private network, create + and use a ``WorkerPool`` to run your builds. Private + ``WorkerPool``\ s give your builds access to any single VPC network + that you administer, including any on-prem resources connected to + that VPC network. For an overview of private pools, see `Private + pools + overview `__. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The resource name of the ``WorkerPool``, with + format + ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. 
+ The value of ``{worker_pool}`` is provided by + ``worker_pool_id`` in ``CreateWorkerPool`` request and the + value of ``{location}`` is determined by the endpoint + accessed. + display_name (str): + A user-specified, human-readable name for the + ``WorkerPool``. If provided, this value must be 1-63 + characters. + uid (str): + Output only. A unique identifier for the ``WorkerPool``. + annotations (MutableMapping[str, str]): + User specified annotations. See + https://google.aip.dev/128#annotations + for more details such as format and size + limitations. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the request to create the + ``WorkerPool`` was received. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the request to update the + ``WorkerPool`` was received. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the request to delete the + ``WorkerPool`` was received. + state (google.cloud.devtools.cloudbuild_v1.types.WorkerPool.State): + Output only. ``WorkerPool`` state. + private_pool_v1_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config): + Legacy Private Pool configuration. + + This field is a member of `oneof`_ ``config``. + etag (str): + Output only. Checksum computed by the server. + May be sent on update and delete requests to + ensure that the client has an up-to-date value + before proceeding. + """ + class State(proto.Enum): + r"""State of the ``WorkerPool``. + + Values: + STATE_UNSPECIFIED (0): + State of the ``WorkerPool`` is unknown. + CREATING (1): + ``WorkerPool`` is being created. + RUNNING (2): + ``WorkerPool`` is running. + DELETING (3): + ``WorkerPool`` is being deleted: cancelling builds and + draining workers. + DELETED (4): + ``WorkerPool`` is deleted. + UPDATING (5): + ``WorkerPool`` is being updated; new builds cannot be run. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + RUNNING = 2 + DELETING = 3 + DELETED = 4 + UPDATING = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + private_pool_v1_config: 'PrivatePoolV1Config' = proto.Field( + proto.MESSAGE, + number=12, + oneof='config', + message='PrivatePoolV1Config', + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + + +class PrivatePoolV1Config(proto.Message): + r"""Configuration for a V1 ``PrivatePool``. + + Attributes: + worker_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.WorkerConfig): + Machine configuration for the workers in the + pool. + network_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig): + Network configuration for the pool. + """ + + class WorkerConfig(proto.Message): + r"""Defines the configuration to be used for creating workers in + the pool. + + Attributes: + machine_type (str): + Machine type of a worker, such as ``e2-medium``. See `Worker + pool config + file `__. + If left blank, Cloud Build will use a sensible default. + disk_size_gb (int): + Size of the disk attached to the worker, in GB. See `Worker + pool config + file `__. + Specify a value of up to 2000. If ``0`` is specified, Cloud + Build will use a standard disk size. 
+ """ + + machine_type: str = proto.Field( + proto.STRING, + number=1, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=2, + ) + + class NetworkConfig(proto.Message): + r"""Defines the network configuration for the pool. + + Attributes: + peered_network (str): + Required. Immutable. The network definition that the workers + are peered to. If this section is left empty, the workers + will be peered to ``WorkerPool.project_id`` on the service + producer network. Must be in the format + ``projects/{project}/global/networks/{network}``, where + ``{project}`` is a project number, such as ``12345``, and + ``{network}`` is the name of a VPC network in the project. + See `Understanding network configuration + options `__ + egress_option (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig.EgressOption): + Option to configure network egress for the + workers. + peered_network_ip_range (str): + Immutable. Subnet IP range within the peered network. This + is specified in CIDR notation with a slash and the subnet + prefix size. You can optionally specify an IP address before + the subnet prefix value. e.g. ``192.168.0.0/29`` would + specify an IP range starting at 192.168.0.0 with a prefix + size of 29 bits. ``/16`` would specify a prefix size of 16 + bits, with an automatically determined IP within the peered + VPC. If unspecified, a value of ``/24`` will be used. + """ + class EgressOption(proto.Enum): + r"""Defines the egress option for the pool. + + Values: + EGRESS_OPTION_UNSPECIFIED (0): + If set, defaults to PUBLIC_EGRESS. + NO_PUBLIC_EGRESS (1): + If set, workers are created without any + public address, which prevents network egress to + public IPs unless a network proxy is configured. + PUBLIC_EGRESS (2): + If set, workers are created with a public + address which allows for public internet egress. 
+ """ + EGRESS_OPTION_UNSPECIFIED = 0 + NO_PUBLIC_EGRESS = 1 + PUBLIC_EGRESS = 2 + + peered_network: str = proto.Field( + proto.STRING, + number=1, + ) + egress_option: 'PrivatePoolV1Config.NetworkConfig.EgressOption' = proto.Field( + proto.ENUM, + number=2, + enum='PrivatePoolV1Config.NetworkConfig.EgressOption', + ) + peered_network_ip_range: str = proto.Field( + proto.STRING, + number=3, + ) + + worker_config: WorkerConfig = proto.Field( + proto.MESSAGE, + number=1, + message=WorkerConfig, + ) + network_config: NetworkConfig = proto.Field( + proto.MESSAGE, + number=2, + message=NetworkConfig, + ) + + +class CreateWorkerPoolRequest(proto.Message): + r"""Request to create a new ``WorkerPool``. + + Attributes: + parent (str): + Required. The parent resource where this worker pool will be + created. Format: + ``projects/{project}/locations/{location}``. + worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): + Required. ``WorkerPool`` resource to create. + worker_pool_id (str): + Required. Immutable. The ID to use for the ``WorkerPool``, + which will become the final component of the resource name. + + This value should be 1-63 characters, and valid characters + are /[a-z][0-9]-/. + validate_only (bool): + If set, validate the request and preview the + response, but do not actually post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + worker_pool: 'WorkerPool' = proto.Field( + proto.MESSAGE, + number=2, + message='WorkerPool', + ) + worker_pool_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetWorkerPoolRequest(proto.Message): + r"""Request to get a ``WorkerPool`` with the specified name. + + Attributes: + name (str): + Required. The name of the ``WorkerPool`` to retrieve. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteWorkerPoolRequest(proto.Message): + r"""Request to delete a ``WorkerPool``. + + Attributes: + name (str): + Required. The name of the ``WorkerPool`` to delete. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + etag (str): + Optional. If provided, it must match the + server's etag on the workerpool for the request + to be processed. + allow_missing (bool): + If set to true, and the ``WorkerPool`` is not found, the + request will succeed but no action will be taken on the + server. + validate_only (bool): + If set, validate the request and preview the + response, but do not actually post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateWorkerPoolRequest(proto.Message): + r"""Request to update a ``WorkerPool``. + + Attributes: + worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): + Required. The ``WorkerPool`` to update. + + The ``name`` field is used to identify the ``WorkerPool`` to + update. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask specifying which fields in ``worker_pool`` to update. + validate_only (bool): + If set, validate the request and preview the + response, but do not actually post it. + """ + + worker_pool: 'WorkerPool' = proto.Field( + proto.MESSAGE, + number=1, + message='WorkerPool', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListWorkerPoolsRequest(proto.Message): + r"""Request to list ``WorkerPool``\ s. 
+ + Attributes: + parent (str): + Required. The parent of the collection of ``WorkerPools``. + Format: ``projects/{project}/locations/{location}``. + page_size (int): + The maximum number of ``WorkerPool``\ s to return. The + service may return fewer than this value. If omitted, the + server will use a sensible default. + page_token (str): + A page token, received from a previous ``ListWorkerPools`` + call. Provide this to retrieve the subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListWorkerPoolsResponse(proto.Message): + r"""Response containing existing ``WorkerPools``. + + Attributes: + worker_pools (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.WorkerPool]): + ``WorkerPools`` for the specified project. + next_page_token (str): + Continuation token used to page through large + result sets. Provide this value in a subsequent + ListWorkerPoolsRequest to return the next page + of results. + """ + + @property + def raw_page(self): + return self + + worker_pools: MutableSequence['WorkerPool'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='WorkerPool', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateWorkerPoolOperationMetadata(proto.Message): + r"""Metadata for the ``CreateWorkerPool`` operation. + + Attributes: + worker_pool (str): + The resource name of the ``WorkerPool`` to create. Format: + ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was created. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was completed. 
+ """ + + worker_pool: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateWorkerPoolOperationMetadata(proto.Message): + r"""Metadata for the ``UpdateWorkerPool`` operation. + + Attributes: + worker_pool (str): + The resource name of the ``WorkerPool`` being updated. + Format: + ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was created. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was completed. + """ + + worker_pool: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteWorkerPoolOperationMetadata(proto.Message): + r"""Metadata for the ``DeleteWorkerPool`` operation. + + Attributes: + worker_pool (str): + The resource name of the ``WorkerPool`` being deleted. + Format: + ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was created. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was completed. 
+ """ + + worker_pool: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini new file mode 100644 index 00000000..574c5aed --- /dev/null +++ b/owl-bot-staging/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py new file mode 100644 index 00000000..e09b880c --- /dev/null +++ b/owl-bot-staging/v1/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds", + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/devtools/cloudbuild_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py new file mode 100644 index 00000000..a8280c5e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApproveBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ApproveBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_approve_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ApproveBuildRequest( + name="name_value", + ) + + # Make the request + operation = client.approve_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ApproveBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py new file mode 100644 index 00000000..e90be4b0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApproveBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_approve_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ApproveBuildRequest( + name="name_value", + ) + + # Make the request + operation = client.approve_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py new file mode 100644 index 00000000..73320372 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CancelBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_cancel_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CancelBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = await client.cancel_build(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CancelBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py new file mode 100644 index 00000000..656b5d59 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CancelBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_cancel_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CancelBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = client.cancel_build(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CancelBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py new file mode 100644 index 00000000..07750a37 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_create_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateBuildRequest( + project_id="project_id_value", + ) + + # Make the request + operation = client.create_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py new file mode 100644 index 00000000..173aea57 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_create_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateBuildRequest( + project_id="project_id_value", + ) + + # Make the request + operation = client.create_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py new file mode 100644 index 00000000..9fe3fcdf --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_create_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.CreateBuildTriggerRequest( + project_id="project_id_value", + trigger=trigger, + ) + + # Make the request + response = await client.create_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py new file mode 100644 index 00000000..8ddcd2f3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_create_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.CreateBuildTriggerRequest( + project_id="project_id_value", + trigger=trigger, + ) + + # Make the request + response = client.create_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py new file mode 100644 index 00000000..cd0a773b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_create_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateWorkerPoolRequest( + parent="parent_value", + worker_pool_id="worker_pool_id_value", + ) + + # Make the request + operation = client.create_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py new file mode 100644 index 00000000..80396e7e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_create_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateWorkerPoolRequest( + parent="parent_value", + worker_pool_id="worker_pool_id_value", + ) + + # Make the request + operation = client.create_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py new file mode 100644 index 00000000..62955bcc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_delete_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + await client.delete_build_trigger(request=request) + + +# [END cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py new file mode 100644 index 00000000..249ba150 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_delete_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + client.delete_build_trigger(request=request) + + +# [END cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py new file mode 100644 index 00000000..257fa9ba --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_delete_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteWorkerPoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py new file mode 100644 index 00000000..a2b9f632 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_delete_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteWorkerPoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py new file mode 100644 index 00000000..585bce61 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_get_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = await client.get_build(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py new file mode 100644 index 00000000..d767fe6c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_get_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = client.get_build(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py new file mode 100644 index 00000000..373b419b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_get_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + response = await client.get_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py new file mode 100644 index 00000000..f2dd1102 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_get_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + response = client.get_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py new file mode 100644 index 00000000..1ad3016f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_get_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetWorkerPoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_worker_pool(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py new file mode 100644 index 00000000..fd50d2fd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_get_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetWorkerPoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_worker_pool(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py new file mode 100644 index 00000000..43b21efa --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuildTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_list_build_triggers(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildTriggersRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_build_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py new file mode 100644 index 00000000..86f6e1c1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuildTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_list_build_triggers(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildTriggersRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_build_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py new file mode 100644 index 00000000..30ad36a2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuilds +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListBuilds_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_list_builds(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildsRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_builds(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListBuilds_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py new file mode 100644 index 00000000..9c2813c3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuilds +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListBuilds_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_list_builds(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildsRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_builds(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListBuilds_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py new file mode 100644 index 00000000..378636ad --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkerPools +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_list_worker_pools(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListWorkerPoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_worker_pools(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py new file mode 100644 index 00000000..d23cdb76 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkerPools +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_list_worker_pools(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListWorkerPoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_worker_pools(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py new file mode 100644 index 00000000..133c477b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReceiveTriggerWebhook +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_receive_trigger_webhook(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ReceiveTriggerWebhookRequest( + ) + + # Make the request + response = await client.receive_trigger_webhook(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py new file mode 100644 index 00000000..839f241c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReceiveTriggerWebhook +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_receive_trigger_webhook(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ReceiveTriggerWebhookRequest( + ) + + # Make the request + response = client.receive_trigger_webhook(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py new file mode 100644 index 00000000..8c671273 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetryBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_RetryBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_retry_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RetryBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + operation = client.retry_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_RetryBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py new file mode 100644 index 00000000..6b1d79f0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetryBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_RetryBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_retry_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RetryBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + operation = client.retry_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_RetryBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py new file mode 100644 index 00000000..1c33cfb2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_run_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RunBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + operation = client.run_build_trigger(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py new file mode 100644 index 00000000..78b1a643 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_run_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RunBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + operation = client.run_build_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py new file mode 100644 index 00000000..46d6ea7f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_update_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.UpdateBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=trigger, + ) + + # Make the request + response = await client.update_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py new file mode 100644 index 00000000..4022a4e4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_update_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.UpdateBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=trigger, + ) + + # Make the request + response = client.update_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py new file mode 100644 index 00000000..4152140a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_update_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.UpdateWorkerPoolRequest( + ) + + # Make the request + operation = client.update_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py new file mode 100644 index 00000000..b7bab1b1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_update_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.UpdateWorkerPoolRequest( + ) + + # Make the request + operation = client.update_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json new file mode 100644 index 00000000..e379efab --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -0,0 +1,3027 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.devtools.cloudbuild.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-build", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.approve_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ApproveBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ApproveBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "approval_result", 
+ "type": "google.cloud.devtools.cloudbuild_v1.types.ApprovalResult" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "approve_build" + }, + "description": "Sample for ApproveBuild", + "file": "cloudbuild_v1_generated_cloud_build_approve_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ApproveBuild_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_approve_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.approve_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ApproveBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ApproveBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "approval_result", + "type": "google.cloud.devtools.cloudbuild_v1.types.ApprovalResult" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "approve_build" + }, + "description": "Sample for ApproveBuild", + "file": "cloudbuild_v1_generated_cloud_build_approve_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_approve_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.cancel_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CancelBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", + "shortName": "cancel_build" + }, + "description": "Sample for CancelBuild", + "file": "cloudbuild_v1_generated_cloud_build_cancel_build_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CancelBuild_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_cancel_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.cancel_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CancelBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", + "shortName": "cancel_build" + }, + "description": "Sample for CancelBuild", + "file": "cloudbuild_v1_generated_cloud_build_cancel_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CancelBuild_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_cancel_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "create_build_trigger" + }, + "description": "Sample for CreateBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, 
+ "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "create_build_trigger" + }, + "description": "Sample for CreateBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "build", + "type": "google.cloud.devtools.cloudbuild_v1.types.Build" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_build" + }, + "description": "Sample for CreateBuild", + "file": "cloudbuild_v1_generated_cloud_build_create_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuild_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_build", 
+ "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "build", + "type": "google.cloud.devtools.cloudbuild_v1.types.Build" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_build" + }, + "description": "Sample for CreateBuild", + "file": "cloudbuild_v1_generated_cloud_build_create_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuild_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateWorkerPool" + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "worker_pool", + "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" + }, + { + "name": "worker_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_worker_pool" + }, + "description": "Sample for CreateWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest" + }, + { + "name": 
"parent", + "type": "str" + }, + { + "name": "worker_pool", + "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" + }, + { + "name": "worker_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_worker_pool" + }, + "description": "Sample for CreateWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.delete_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "DeleteBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_build_trigger" + }, + "description": "Sample for DeleteBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.delete_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "DeleteBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_build_trigger" + }, + "description": "Sample for DeleteBuildTrigger", + "file": 
"cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.delete_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "DeleteWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_worker_pool" + }, + "description": "Sample for DeleteWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.delete_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "DeleteWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_worker_pool" + }, + "description": "Sample for DeleteWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "get_build_trigger" + }, + "description": "Sample for GetBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + 
"shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "get_build_trigger" + }, + "description": "Sample for GetBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuild", + "service": { + 
"fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", + "shortName": "get_build" + }, + "description": "Sample for GetBuild", + "file": "cloudbuild_v1_generated_cloud_build_get_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuild_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", + "shortName": "get_build" + }, + "description": "Sample for GetBuild", + "file": "cloudbuild_v1_generated_cloud_build_get_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuild_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool", + "shortName": "get_worker_pool" + }, + "description": "Sample for 
GetWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool", + "shortName": "get_worker_pool" + }, + "description": "Sample for GetWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_build_triggers", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListBuildTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersAsyncPager", + "shortName": "list_build_triggers" + }, + "description": "Sample for ListBuildTriggers", + "file": "cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_build_triggers", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListBuildTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersPager", + "shortName": "list_build_triggers" + }, + "description": "Sample for ListBuildTriggers", + "file": "cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_builds", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListBuilds" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsAsyncPager", + "shortName": "list_builds" + }, + "description": "Sample for ListBuilds", + "file": "cloudbuild_v1_generated_cloud_build_list_builds_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuilds_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_builds_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_builds", + "method": { + "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListBuilds" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsPager", + "shortName": "list_builds" + }, + "description": "Sample for ListBuilds", + "file": "cloudbuild_v1_generated_cloud_build_list_builds_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuilds_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_builds_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_worker_pools", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListWorkerPools" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsAsyncPager", + "shortName": "list_worker_pools" + }, + "description": "Sample for ListWorkerPools", + "file": "cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_worker_pools", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListWorkerPools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsPager", + "shortName": "list_worker_pools" + }, + "description": "Sample for ListWorkerPools", + "file": "cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.receive_trigger_webhook", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ReceiveTriggerWebhook", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ReceiveTriggerWebhook" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse", + "shortName": "receive_trigger_webhook" + }, + "description": "Sample for ReceiveTriggerWebhook", + 
"file": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.receive_trigger_webhook", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ReceiveTriggerWebhook", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ReceiveTriggerWebhook" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse", + "shortName": "receive_trigger_webhook" + }, + "description": "Sample for ReceiveTriggerWebhook", + "file": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": 
"FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.retry_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "RetryBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "retry_build" + }, + "description": "Sample for RetryBuild", + "file": "cloudbuild_v1_generated_cloud_build_retry_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_RetryBuild_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_retry_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.retry_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "RetryBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "retry_build" + }, + "description": "Sample for RetryBuild", + "file": "cloudbuild_v1_generated_cloud_build_retry_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_RetryBuild_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_retry_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + 
"shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.run_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "RunBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "source", + "type": "google.cloud.devtools.cloudbuild_v1.types.RepoSource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "run_build_trigger" + }, + "description": "Sample for RunBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.run_build_trigger", + "method": { + 
"fullName": "google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "RunBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "source", + "type": "google.cloud.devtools.cloudbuild_v1.types.RepoSource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "run_build_trigger" + }, + "description": "Sample for RunBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.update_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", + "service": { + "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "UpdateBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "update_build_trigger" + }, + "description": "Sample for UpdateBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.update_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "UpdateBuildTrigger" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "update_build_trigger" + }, + "description": "Sample for UpdateBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.update_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "UpdateWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest" + }, + { + "name": "worker_pool", + "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_worker_pool" + }, + "description": "Sample for UpdateWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.update_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "UpdateWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest" + }, + { + "name": "worker_pool", + "type": 
"google.cloud.devtools.cloudbuild_v1.types.WorkerPool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_worker_pool" + }, + "description": "Sample for UpdateWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py new file mode 100644 index 00000000..78aabad9 --- /dev/null +++ b/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py @@ -0,0 +1,193 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Fix up sources that use the cloudbuild client library.

Walks an input directory of Python files and rewrites "flattened"
positional/keyword client method calls into the canonical
``request={...}`` form, writing transformed copies to an output
directory. The originals are never modified.
"""
import argparse
import os
import pathlib
import sys
from typing import Any, Callable, Dict, List, Sequence, Tuple

import libcst as cst


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any],
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Args:
        predicate: Decides which output list each element goes to.
        iterator: Elements to partition.

    Returns:
        ``(true_list, false_list)``: elements for which *predicate* is
        truthy, then those for which it is falsy, each preserving the
        input order.
    """
    results: Tuple[List[Any], List[Any]] = ([], [])

    for i in iterator:
        # int(bool) indexes 0 (falsy) or 1 (truthy).
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class cloudbuildCallTransformer(cst.CSTTransformer):
    """Rewrites flattened API method calls into the ``request`` dict form.

    NOTE: the lowercase class name matches the generated template; keep
    it unchanged so external tooling that references it keeps working.
    """
    # Parameters that control the RPC itself rather than populate the
    # request message; they stay as top-level keyword arguments.
    # Tuple[str, ...] (variadic), not Tuple[str] (a 1-tuple).
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # Flattened parameter order for each API method; positional args in
    # existing calls are mapped onto these names in order.
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'approve_build': ('name', 'approval_result', ),
        'cancel_build': ('project_id', 'id', 'name', ),
        'create_build': ('project_id', 'build', 'parent', ),
        'create_build_trigger': ('project_id', 'trigger', 'parent', ),
        'create_worker_pool': ('parent', 'worker_pool', 'worker_pool_id', 'validate_only', ),
        'delete_build_trigger': ('project_id', 'trigger_id', 'name', ),
        'delete_worker_pool': ('name', 'etag', 'allow_missing', 'validate_only', ),
        'get_build': ('project_id', 'id', 'name', ),
        'get_build_trigger': ('project_id', 'trigger_id', 'name', ),
        'get_worker_pool': ('name', ),
        'list_builds': ('project_id', 'parent', 'page_size', 'page_token', 'filter', ),
        'list_build_triggers': ('project_id', 'parent', 'page_size', 'page_token', ),
        'list_worker_pools': ('parent', 'page_size', 'page_token', ),
        'receive_trigger_webhook': ('name', 'body', 'project_id', 'trigger', 'secret', ),
        'retry_build': ('project_id', 'id', 'name', ),
        'run_build_trigger': ('project_id', 'trigger_id', 'name', 'source', ),
        'update_build_trigger': ('project_id', 'trigger_id', 'trigger', ),
        'update_worker_pool': ('worker_pool', 'update_mask', 'validate_only', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a recognized client method call to use ``request={...}``.

        Calls that are not API methods, or that already use ``request=``,
        are returned unchanged.
        """
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate the RPC-control keywords from request-field keywords.
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs,
        )

        # Positional args beyond the flattened params must be the control
        # params, passed positionally; rebind them to their keyword names.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(
            cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
            for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)
        )

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value),
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)
            ]),
            keyword=cst.Name("request"),
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs,
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=cloudbuildCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory

    Note: the default transformer instance is created once at definition
    time and shared across calls; it holds no per-file state, so this
    is safe.
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        # Explicit encoding: generated sources are UTF-8; don't depend
        # on the platform locale.
        with open(fpath, 'r', encoding='utf-8') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w', encoding='utf-8') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the cloudbuild client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    # Validate preconditions of fix_files before doing any work.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
import os

import setuptools # type: ignore

# Root of the distribution; used to resolve the version and README paths.
# (Previously this was computed twice — once here and once after the
# dependency list — the duplicate assignment has been removed.)
package_root = os.path.abspath(os.path.dirname(__file__))

name = 'google-cloud-build'


description = "Google Cloud Build API client library"

# Read __version__ from gapic_version.py without importing the package,
# since importing would require the package's dependencies to be installed.
version = {}
with open(os.path.join(package_root, 'google/cloud/devtools/cloudbuild/gapic_version.py')) as fp:
    exec(fp.read(), version)
version = version["__version__"]

# Map the semantic version onto a PyPI development-status classifier:
# a 0.x version is Beta, anything else is Production/Stable.
if version[0] == "0":
    release_status = "Development Status :: 4 - Beta"
else:
    release_status = "Development Status :: 5 - Production/Stable"

dependencies = [
    "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
    "proto-plus >= 1.22.0, <2.0.0dev",
    "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'",
    "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
]
url = "https://github.com/googleapis/python-build"

# Use the README as the long description shown on PyPI.
# (Builtin open() replaces the legacy io.open(); they are the same function
# on Python 3, so `import io` is no longer needed.)
readme_filename = os.path.join(package_root, "README.rst")
with open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()

# Only ship the google.* namespace packages from this tree.
packages = [
    package
    for package in setuptools.PEP420PackageFinder.find()
    if package.startswith("google")
]

namespaces = ["google", "google.cloud", "google.cloud.devtools"]

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url=url,
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    python_requires=">=3.7",
    namespace_packages=namespaces,
    install_requires=dependencies,
    include_package_data=True,
    zip_safe=False,
)
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py new file mode 100644 index 00000000..e76401f0 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -0,0 +1,10282 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api import httpbody_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildAsyncClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from 
def client_cert_source_callback():
    """Return a dummy (cert, key) pair used wherever a client cert source is needed."""
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps *.googleapis.com hosts to their mTLS form and leaves others alone."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert CloudBuildClient._get_default_mtls_endpoint(None) is None
    assert CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert CloudBuildClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (CloudBuildClient, "grpc"),
    (CloudBuildAsyncClient, "grpc_asyncio"),
    (CloudBuildClient, "rest"),
])
def test_cloud_build_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info builds a client whose transport holds the factory's credentials and default host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # gRPC transports carry host:port; REST carries a full URL.
        assert client.transport._host == (
            'cloudbuild.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://cloudbuild.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.CloudBuildGrpcTransport, "grpc"),
    (transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"),
    (transports.CloudBuildRestTransport, "rest"),
])
def test_cloud_build_client_service_account_always_use_jwt(transport_class, transport_name):
    """with_always_use_jwt_access is invoked on the credentials only when always_use_jwt_access=True."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (CloudBuildClient, "grpc"),
    (CloudBuildAsyncClient, "grpc_asyncio"),
    (CloudBuildClient, "rest"),
])
def test_cloud_build_client_from_service_account_file(client_class, transport_name):
    """from_service_account_file (and its _json alias) wire the file credentials into the transport."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # from_service_account_json is the documented alias of _file.
        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'cloudbuild.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://cloudbuild.googleapis.com'
        )


def test_cloud_build_client_get_transport_class():
    """get_transport_class returns a supported transport by default and resolves names like 'grpc'."""
    transport = CloudBuildClient.get_transport_class()
    available_transports = [
        transports.CloudBuildGrpcTransport,
        transports.CloudBuildRestTransport,
    ]
    assert transport in available_transports

    transport = CloudBuildClient.get_transport_class("grpc")
    assert transport == transports.CloudBuildGrpcTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"),
    (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"),
    (CloudBuildClient, transports.CloudBuildRestTransport, "rest"),
])
@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient))
@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient))
def test_cloud_build_client_client_options(client_class, transport_class, transport_name):
    """Client constructor honors ClientOptions and GOOGLE_API_USE_MTLS_ENDPOINT when picking the transport host."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_endpoint is provided
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "true"),
    (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "false"),
    (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", "false"),
    (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "true"),
    (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "false"),
])
@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient))
@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_cloud_build_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, the client switches to the mTLS endpoint iff a client cert is usable."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # NOTE(review): `client` here is still the instance created in the
                    # previous section; DEFAULT_ENDPOINT is a class-level attribute
                    # (patched by the decorators above), so the value read is the same
                    # as client_class.DEFAULT_ENDPOINT — confirm this is intentional.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )


@pytest.mark.parametrize("client_class", [
    CloudBuildClient, CloudBuildAsyncClient
])
@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient))
@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient))
def test_cloud_build_client_get_mtls_endpoint_and_cert_source(client_class):
    """get_mtls_endpoint_and_cert_source resolves (endpoint, cert source) from options and env vars."""
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"),
    (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"),
    (CloudBuildClient, transports.CloudBuildRestTransport, "rest"),
])
def test_cloud_build_client_client_options_scopes(client_class, transport_class, transport_name):
    """ClientOptions.scopes is forwarded verbatim to the transport constructor."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers),
    (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
    (CloudBuildClient, transports.CloudBuildRestTransport, "rest", None),
])
def test_cloud_build_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """ClientOptions.credentials_file is forwarded verbatim to the transport constructor."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

def test_cloud_build_client_client_options_from_dict():
    """A plain dict of client options is accepted and applied like a ClientOptions object."""
    with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildGrpcTransport.__init__') as grpc_transport:
        grpc_transport.return_value = None
        client = CloudBuildClient(
            client_options={'api_endpoint': 'squid.clam.whelk'}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )


@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers),
    (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
])
def test_cloud_build_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """Credentials loaded from a file are the ones passed to grpc create_channel."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        # The channel must be opened with the file credentials, the service's
        # default scope/host, and unlimited message sizes.
        create_channel.assert_called_with(
            "cloudbuild.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            scopes=None,
            default_host="cloudbuild.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )


@pytest.mark.parametrize("request_type", [
    cloudbuild.CreateBuildRequest,
    dict,
])
def test_create_build(request_type, transport: str = 'grpc'):
    """create_build sends one CreateBuildRequest over gRPC and returns a long-running operation future."""
    client = CloudBuildClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_build),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.create_build(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudbuild.CreateBuildRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


def test_create_build_empty_call():
    """Calling create_build with no arguments still issues an (empty) CreateBuildRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = CloudBuildClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_build),
            '__call__') as call:
        client.create_build()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudbuild.CreateBuildRequest()
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_build_async_from_dict(): + await test_create_build_async(request_type=dict) + +def test_create_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.CreateBuildRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_create_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_build( + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].build + mock_val = cloudbuild.Build(name='name_value') + assert arg == mock_val + + +def test_create_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_build( + cloudbuild.CreateBuildRequest(), + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_build( + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].build + mock_val = cloudbuild.Build(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_build( + cloudbuild.CreateBuildRequest(), + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetBuildRequest, + dict, +]) +def test_get_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + ) + response = client.get_build(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +def test_get_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + client.get_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildRequest() + +@pytest.mark.asyncio +async def test_get_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + )) + response = await client.get_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +@pytest.mark.asyncio +async def test_get_build_async_from_dict(): + await test_get_build_async(request_type=dict) + +def test_get_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.GetBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + call.return_value = cloudbuild.Build() + client.get_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_get_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.Build() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + + +def test_get_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_build( + cloudbuild.GetBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + +@pytest.mark.asyncio +async def test_get_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.Build() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_build( + cloudbuild.GetBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListBuildsRequest, + dict, +]) +def test_list_builds(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudbuild.ListBuildsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_builds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_builds_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + client.list_builds() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildsRequest() + +@pytest.mark.asyncio +async def test_list_builds_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListBuildsRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_builds(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_builds_async_from_dict(): + await test_list_builds_async(request_type=dict) + +def test_list_builds_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.ListBuildsRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + call.return_value = cloudbuild.ListBuildsResponse() + client.list_builds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_list_builds_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_builds( + project_id='project_id_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + + +def test_list_builds_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_builds( + cloudbuild.ListBuildsRequest(), + project_id='project_id_value', + filter='filter_value', + ) + +@pytest.mark.asyncio +async def test_list_builds_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_builds( + project_id='project_id_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_builds_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ await client.list_builds(
+ cloudbuild.ListBuildsRequest(),
+ project_id='project_id_value',
+ filter='filter_value',
+ )
+
+
+def test_list_builds_pager(transport_name: str = "grpc"):
+ client = CloudBuildClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_builds),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ ],
+ next_page_token='abc',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[],
+ next_page_token='def',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ ],
+ next_page_token='ghi',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ pager = client.list_builds(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, cloudbuild.Build)
+ for i in results)
+def test_list_builds_pages(transport_name: str = "grpc"):
+ client = CloudBuildClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_builds),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ ],
+ next_page_token='abc',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[],
+ next_page_token='def',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ ],
+ next_page_token='ghi',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_builds(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_builds_async_pager():
+ client = CloudBuildAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_builds),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ ],
+ next_page_token='abc',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[],
+ next_page_token='def',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ ],
+ next_page_token='ghi',
+ ),
+ cloudbuild.ListBuildsResponse(
+ builds=[
+ cloudbuild.Build(),
+ cloudbuild.Build(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_builds(request={},)
+ assert async_pager.next_page_token == 'abc'
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, cloudbuild.Build)
+ for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_builds_async_pages():
+ client = CloudBuildAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_builds),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildsResponse( + builds=[], + next_page_token='def', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_builds(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CancelBuildRequest, + dict, +]) +def test_cancel_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + ) + response = client.cancel_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CancelBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +def test_cancel_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + client.cancel_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CancelBuildRequest() + +@pytest.mark.asyncio +async def test_cancel_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CancelBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + )) + response = await client.cancel_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CancelBuildRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +@pytest.mark.asyncio +async def test_cancel_build_async_from_dict(): + await test_cancel_build_async(request_type=dict) + +def test_cancel_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.CancelBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + call.return_value = cloudbuild.Build() + client.cancel_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_cancel_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudbuild.Build() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + + +def test_cancel_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_build( + cloudbuild.CancelBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + +@pytest.mark.asyncio +async def test_cancel_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.Build() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_cancel_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_build( + cloudbuild.CancelBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.RetryBuildRequest, + dict, +]) +def test_retry_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.retry_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RetryBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_retry_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + client.retry_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RetryBuildRequest() + +@pytest.mark.asyncio +async def test_retry_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.RetryBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.retry_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RetryBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_retry_build_async_from_dict(): + await test_retry_build_async(request_type=dict) + +def test_retry_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudbuild.RetryBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.retry_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_retry_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.retry_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + + +def test_retry_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.retry_build( + cloudbuild.RetryBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + +@pytest.mark.asyncio +async def test_retry_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.retry_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_retry_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.retry_build( + cloudbuild.RetryBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ApproveBuildRequest, + dict, +]) +def test_approve_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.approve_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ApproveBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_approve_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + client.approve_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ApproveBuildRequest() + +@pytest.mark.asyncio +async def test_approve_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ApproveBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.approve_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ApproveBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_approve_build_async_from_dict(): + await test_approve_build_async(request_type=dict) + +def test_approve_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.ApproveBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.approve_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_approve_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.approve_build( + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].approval_result + mock_val = cloudbuild.ApprovalResult(approver_account='approver_account_value') + assert arg == mock_val + + +def test_approve_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.approve_build( + cloudbuild.ApproveBuildRequest(), + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + +@pytest.mark.asyncio +async def test_approve_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.approve_build( + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].approval_result + mock_val = cloudbuild.ApprovalResult(approver_account='approver_account_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_approve_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.approve_build( + cloudbuild.ApproveBuildRequest(), + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateBuildTriggerRequest, + dict, +]) +def test_create_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + response = client.create_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_create_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + client.create_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_create_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + )) + response = await client.create_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +@pytest.mark.asyncio +async def test_create_build_trigger_async_from_dict(): + await test_create_build_trigger_async(request_type=dict) + +def test_create_build_trigger_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.CreateBuildTriggerRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + call.return_value = cloudbuild.BuildTrigger() + client.create_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_create_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_build_trigger( + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') + assert arg == mock_val + + +def test_create_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_build_trigger( + cloudbuild.CreateBuildTriggerRequest(), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + +@pytest.mark.asyncio +async def test_create_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_build_trigger( + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_build_trigger( + cloudbuild.CreateBuildTriggerRequest(), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetBuildTriggerRequest, + dict, +]) +def test_get_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + response = client.get_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_get_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + client.get_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_get_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + )) + response = await client.get_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +@pytest.mark.asyncio +async def test_get_build_trigger_async_from_dict(): + await test_get_build_trigger_async(request_type=dict) + +def test_get_build_trigger_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.GetBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + call.return_value = cloudbuild.BuildTrigger() + client.get_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_get_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + + +def test_get_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_build_trigger( + cloudbuild.GetBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + +@pytest.mark.asyncio +async def test_get_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_build_trigger( + cloudbuild.GetBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListBuildTriggersRequest, + dict, +]) +def test_list_build_triggers(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildTriggersResponse( + next_page_token='next_page_token_value', + ) + response = client.list_build_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_build_triggers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + client.list_build_triggers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildTriggersRequest() + +@pytest.mark.asyncio +async def test_list_build_triggers_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListBuildTriggersRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_build_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildTriggersAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_build_triggers_async_from_dict(): + await test_list_build_triggers_async(request_type=dict) + +def test_list_build_triggers_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudbuild.ListBuildTriggersRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + call.return_value = cloudbuild.ListBuildTriggersResponse() + client.list_build_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_list_build_triggers_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_build_triggers( + project_id='project_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + + +def test_list_build_triggers_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_build_triggers( + cloudbuild.ListBuildTriggersRequest(), + project_id='project_id_value', + ) + +@pytest.mark.asyncio +async def test_list_build_triggers_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_build_triggers( + project_id='project_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_build_triggers_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_build_triggers( + cloudbuild.ListBuildTriggersRequest(), + project_id='project_id_value', + ) + + +def test_list_build_triggers_pager(transport_name: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_build_triggers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.BuildTrigger) + for i in results) +def test_list_build_triggers_pages(transport_name: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + RuntimeError, + ) + pages = list(client.list_build_triggers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_build_triggers_async_pager(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_build_triggers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloudbuild.BuildTrigger) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_build_triggers_async_pages(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_build_triggers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + cloudbuild.DeleteBuildTriggerRequest, + dict, +]) +def test_delete_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + client.delete_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_delete_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.DeleteBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_build_trigger_async_from_dict(): + await test_delete_build_trigger_async(request_type=dict) + +def test_delete_build_trigger_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.DeleteBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + call.return_value = None + client.delete_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_delete_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + + +def test_delete_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_build_trigger( + cloudbuild.DeleteBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + +@pytest.mark.asyncio +async def test_delete_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_build_trigger( + cloudbuild.DeleteBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.UpdateBuildTriggerRequest, + dict, +]) +def test_update_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + response = client.update_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_update_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + client.update_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_update_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.UpdateBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + )) + response = await client.update_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +@pytest.mark.asyncio +async def test_update_build_trigger_async_from_dict(): + await test_update_build_trigger_async(request_type=dict) + +def test_update_build_trigger_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.UpdateBuildTriggerRequest(**{"trigger": {"resource_name": "projects/sample1/locations/sample2/triggers/sample3"}}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + call.return_value = cloudbuild.BuildTrigger() + client.update_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_update_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') + assert arg == mock_val + + +def test_update_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_build_trigger( + cloudbuild.UpdateBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + +@pytest.mark.asyncio +async def test_update_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_build_trigger( + cloudbuild.UpdateBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.RunBuildTriggerRequest, + dict, +]) +def test_run_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.run_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RunBuildTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_run_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + client.run_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RunBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_run_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.RunBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.run_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RunBuildTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_run_build_trigger_async_from_dict(): + await test_run_build_trigger_async(request_type=dict) + +def test_run_build_trigger_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.RunBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.run_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_run_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + arg = args[0].source + mock_val = cloudbuild.RepoSource(project_id='project_id_value') + assert arg == mock_val + + +def test_run_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.run_build_trigger( + cloudbuild.RunBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + +@pytest.mark.asyncio +async def test_run_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + arg = args[0].source + mock_val = cloudbuild.RepoSource(project_id='project_id_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_run_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.run_build_trigger( + cloudbuild.RunBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ReceiveTriggerWebhookRequest, + dict, +]) +def test_receive_trigger_webhook(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.receive_trigger_webhook), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ReceiveTriggerWebhookResponse( + ) + response = client.receive_trigger_webhook(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) + + +def test_receive_trigger_webhook_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.receive_trigger_webhook), + '__call__') as call: + client.receive_trigger_webhook() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() + +@pytest.mark.asyncio +async def test_receive_trigger_webhook_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ReceiveTriggerWebhookRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.receive_trigger_webhook), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ReceiveTriggerWebhookResponse( + )) + response = await client.receive_trigger_webhook(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) + + +@pytest.mark.asyncio +async def test_receive_trigger_webhook_async_from_dict(): + await test_receive_trigger_webhook_async(request_type=dict) + + +def test_receive_trigger_webhook_field_headers(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudbuild.ReceiveTriggerWebhookRequest() + + request.project_id = 'project_id_value' + request.trigger = 'trigger_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.receive_trigger_webhook), + '__call__') as call: + call.return_value = cloudbuild.ReceiveTriggerWebhookResponse() + client.receive_trigger_webhook(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&trigger=trigger_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_receive_trigger_webhook_field_headers_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.ReceiveTriggerWebhookRequest() + + request.project_id = 'project_id_value' + request.trigger = 'trigger_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.receive_trigger_webhook), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ReceiveTriggerWebhookResponse()) + await client.receive_trigger_webhook(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&trigger=trigger_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateWorkerPoolRequest, + dict, +]) +def test_create_worker_pool(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_worker_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + client.create_worker_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateWorkerPoolRequest() + +@pytest.mark.asyncio +async def test_create_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateWorkerPoolRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_worker_pool_async_from_dict(): + await test_create_worker_pool_async(request_type=dict) + +def test_create_worker_pool_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.CreateWorkerPoolRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_create_worker_pool_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_worker_pool( + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].worker_pool + mock_val = cloudbuild.WorkerPool(name='name_value') + assert arg == mock_val + arg = args[0].worker_pool_id + mock_val = 'worker_pool_id_value' + assert arg == mock_val + + +def test_create_worker_pool_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_worker_pool( + cloudbuild.CreateWorkerPoolRequest(), + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + +@pytest.mark.asyncio +async def test_create_worker_pool_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_worker_pool( + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].worker_pool + mock_val = cloudbuild.WorkerPool(name='name_value') + assert arg == mock_val + arg = args[0].worker_pool_id + mock_val = 'worker_pool_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_worker_pool_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_worker_pool( + cloudbuild.CreateWorkerPoolRequest(), + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetWorkerPoolRequest, + dict, +]) +def test_get_worker_pool(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.WorkerPool( + name='name_value', + display_name='display_name_value', + uid='uid_value', + state=cloudbuild.WorkerPool.State.CREATING, + etag='etag_value', + ) + response = client.get_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.WorkerPool) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.state == cloudbuild.WorkerPool.State.CREATING + assert response.etag == 'etag_value' + + +def test_get_worker_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + client.get_worker_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetWorkerPoolRequest() + +@pytest.mark.asyncio +async def test_get_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetWorkerPoolRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( + name='name_value', + display_name='display_name_value', + uid='uid_value', + state=cloudbuild.WorkerPool.State.CREATING, + etag='etag_value', + )) + response = await client.get_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetWorkerPoolRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.WorkerPool) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.state == cloudbuild.WorkerPool.State.CREATING + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_get_worker_pool_async_from_dict(): + await test_get_worker_pool_async(request_type=dict) + +def test_get_worker_pool_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.GetWorkerPoolRequest(**{"name": "projects/sample1/locations/sample2/workerPools/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + call.return_value = cloudbuild.WorkerPool() + client.get_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_get_worker_pool_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.WorkerPool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_worker_pool( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_worker_pool_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_worker_pool( + cloudbuild.GetWorkerPoolRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_worker_pool_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.WorkerPool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_worker_pool( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_worker_pool_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_worker_pool( + cloudbuild.GetWorkerPoolRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.DeleteWorkerPoolRequest, + dict, +]) +def test_delete_worker_pool(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_worker_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + client.delete_worker_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() + +@pytest.mark.asyncio +async def test_delete_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.DeleteWorkerPoolRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_worker_pool_async_from_dict(): + await test_delete_worker_pool_async(request_type=dict) + +def test_delete_worker_pool_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.DeleteWorkerPoolRequest(**{"name": "projects/sample1/locations/sample2/workerPools/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_delete_worker_pool_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_worker_pool( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_worker_pool_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_worker_pool( + cloudbuild.DeleteWorkerPoolRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_worker_pool_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_worker_pool( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_worker_pool_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_worker_pool( + cloudbuild.DeleteWorkerPoolRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.UpdateWorkerPoolRequest, + dict, +]) +def test_update_worker_pool(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_worker_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + client.update_worker_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() + +@pytest.mark.asyncio +async def test_update_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.UpdateWorkerPoolRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_worker_pool_async_from_dict(): + await test_update_worker_pool_async(request_type=dict) + +def test_update_worker_pool_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.UpdateWorkerPoolRequest(**{"worker_pool": {"name": "projects/sample1/locations/sample2/workerPools/sample3"}}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_update_worker_pool_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_worker_pool( + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].worker_pool + mock_val = cloudbuild.WorkerPool(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_worker_pool_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_worker_pool( + cloudbuild.UpdateWorkerPoolRequest(), + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_worker_pool_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_worker_pool( + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].worker_pool + mock_val = cloudbuild.WorkerPool(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_worker_pool_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_worker_pool( + cloudbuild.UpdateWorkerPoolRequest(), + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListWorkerPoolsRequest, + dict, +]) +def test_list_worker_pools(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListWorkerPoolsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_worker_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListWorkerPoolsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkerPoolsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_worker_pools_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + client.list_worker_pools() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListWorkerPoolsRequest() + +@pytest.mark.asyncio +async def test_list_worker_pools_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListWorkerPoolsRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_worker_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListWorkerPoolsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkerPoolsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_worker_pools_async_from_dict(): + await test_list_worker_pools_async(request_type=dict) + +def test_list_worker_pools_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.ListWorkerPoolsRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + call.return_value = cloudbuild.ListWorkerPoolsResponse() + client.list_worker_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_list_worker_pools_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListWorkerPoolsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_worker_pools( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_worker_pools_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_worker_pools( + cloudbuild.ListWorkerPoolsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_worker_pools_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListWorkerPoolsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_worker_pools( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_worker_pools_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_worker_pools( + cloudbuild.ListWorkerPoolsRequest(), + parent='parent_value', + ) + + +def test_list_worker_pools_pager(transport_name: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_worker_pools(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.WorkerPool) + for i in results) +def test_list_worker_pools_pages(transport_name: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + RuntimeError, + ) + pages = list(client.list_worker_pools(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_worker_pools_async_pager(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_worker_pools(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloudbuild.WorkerPool) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_worker_pools_async_pages(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_worker_pools(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateBuildRequest, + dict, +]) +def test_create_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request_init["build"] = {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 
1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': 
{}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_build(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_build_rest_required_fields(request_type=cloudbuild.CreateBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parent", )) & set(("projectId", "build", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
cloudbuild.CreateBuildRequest.pb(cloudbuild.CreateBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.CreateBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request_init["build"] = {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': 
['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 
'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_build(request) + + +def test_create_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds" % client.transport._host, args[1]) + + +def test_create_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_build( + cloudbuild.CreateBuildRequest(), + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + + +def test_create_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetBuildRequest, + dict, +]) +def test_get_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_build(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +def test_get_build_rest_required_fields(request_type=cloudbuild.GetBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + 
use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["id"] = 'id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "id" in jsonified_request + assert jsonified_request["id"] == 'id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.Build() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("projectId", "id", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetBuildRequest.pb(cloudbuild.GetBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) + + request = cloudbuild.GetBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.Build() + + client.get_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_build(request) + + +def test_get_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + id='id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}" % client.transport._host, args[1]) + + +def test_get_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_build( + cloudbuild.GetBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +def test_get_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListBuildsRequest, + dict, +]) +def test_list_builds_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_builds(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_builds_rest_required_fields(request_type=cloudbuild.ListBuildsRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_builds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_builds._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "page_size", "page_token", "parent", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_builds(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_builds_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_builds._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", "parent", )) & set(("projectId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_builds_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_builds") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_builds") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListBuildsRequest.pb(cloudbuild.ListBuildsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListBuildsResponse.to_json(cloudbuild.ListBuildsResponse()) + + request = cloudbuild.ListBuildsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListBuildsResponse() + + client.list_builds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_builds_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListBuildsRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_builds(request) + + +def test_list_builds_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListBuildsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + filter='filter_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_builds(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds" % client.transport._host, args[1]) + + +def test_list_builds_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_builds( + cloudbuild.ListBuildsRequest(), + project_id='project_id_value', + filter='filter_value', + ) + + +def test_list_builds_rest_pager(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildsResponse( + builds=[], + next_page_token='def', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudbuild.ListBuildsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project_id': 'sample1'} + + pager = client.list_builds(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.Build) + for i in results) + + pages = list(client.list_builds(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CancelBuildRequest, + dict, +]) +def test_cancel_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.cancel_build(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +def test_cancel_build_rest_required_fields(request_type=cloudbuild.CancelBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["id"] = 'id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "id" in jsonified_request + assert jsonified_request["id"] == 'id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.Build() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_cancel_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.cancel_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId", "id", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_cancel_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_cancel_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.CancelBuildRequest.pb(cloudbuild.CancelBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) + + request = cloudbuild.CancelBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.Build() + + client.cancel_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CancelBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_build(request) + + +def test_cancel_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + id='id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.cancel_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}:cancel" % client.transport._host, args[1]) + + +def test_cancel_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_build( + cloudbuild.CancelBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +def test_cancel_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.RetryBuildRequest, + dict, +]) +def test_retry_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.retry_build(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_retry_build_rest_required_fields(request_type=cloudbuild.RetryBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).retry_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["id"] = 'id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).retry_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "id" in jsonified_request + assert jsonified_request["id"] == 'id_value' + + client = CloudBuildClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.retry_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_retry_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.retry_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId", "id", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retry_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + 
client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_retry_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_retry_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.RetryBuildRequest.pb(cloudbuild.RetryBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.RetryBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.retry_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retry_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.RetryBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retry_build(request) + + +def test_retry_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + id='id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.retry_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}:retry" % client.transport._host, args[1]) + + +def test_retry_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.retry_build( + cloudbuild.RetryBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +def test_retry_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ApproveBuildRequest, + dict, +]) +def test_approve_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/builds/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.approve_build(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_approve_build_rest_required_fields(request_type=cloudbuild.ApproveBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).approve_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).approve_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.approve_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_approve_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.approve_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_approve_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_approve_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_approve_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ApproveBuildRequest.pb(cloudbuild.ApproveBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.ApproveBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.approve_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_approve_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ApproveBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/builds/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.approve_build(request) + + +def test_approve_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/builds/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.approve_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/builds/*}:approve" % client.transport._host, args[1]) + + +def test_approve_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.approve_build( + cloudbuild.ApproveBuildRequest(), + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + + +def test_approve_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateBuildTriggerRequest, + dict, +]) +def test_create_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': 
{'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 
'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_build_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_create_build_trigger_rest_required_fields(request_type=cloudbuild.CreateBuildTriggerRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped 
+ + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_build_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parent", )) & set(("projectId", "trigger", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_build_trigger") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_build_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.CreateBuildTriggerRequest.pb(cloudbuild.CreateBuildTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) + + request = cloudbuild.CreateBuildTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.BuildTrigger() + + client.create_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateBuildTriggerRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 
'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 
'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} + request 
= request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_build_trigger(request) + + +def test_create_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1]) + + +def test_create_build_trigger_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_build_trigger( + cloudbuild.CreateBuildTriggerRequest(), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + +def test_create_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetBuildTriggerRequest, + dict, +]) +def test_get_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_build_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_get_build_trigger_rest_required_fields(request_type=cloudbuild.GetBuildTriggerRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["triggerId"] = 'trigger_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_build_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_build_trigger") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_build_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetBuildTriggerRequest.pb(cloudbuild.GetBuildTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) + + request = cloudbuild.GetBuildTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.BuildTrigger() + + client.get_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetBuildTriggerRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_build_trigger(request) + + +def test_get_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.BuildTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) + + +def test_get_build_trigger_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_build_trigger( + cloudbuild.GetBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + +def test_get_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListBuildTriggersRequest, + dict, +]) +def test_list_build_triggers_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_build_triggers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBuildTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_build_triggers_rest_required_fields(request_type=cloudbuild.ListBuildTriggersRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_build_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_build_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", "parent", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_build_triggers(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_build_triggers_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_build_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", "parent", )) & set(("projectId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_build_triggers_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_build_triggers") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_build_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListBuildTriggersRequest.pb(cloudbuild.ListBuildTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListBuildTriggersResponse.to_json(cloudbuild.ListBuildTriggersResponse()) + + request = cloudbuild.ListBuildTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListBuildTriggersResponse() + + client.list_build_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_build_triggers_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListBuildTriggersRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_build_triggers(request) + + +def test_list_build_triggers_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_build_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1]) + + +def test_list_build_triggers_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_build_triggers( + cloudbuild.ListBuildTriggersRequest(), + project_id='project_id_value', + ) + + +def test_list_build_triggers_rest_pager(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudbuild.ListBuildTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project_id': 'sample1'} + + pager = client.list_build_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.BuildTrigger) + for i in results) + + pages = list(client.list_build_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token 
+ + +@pytest.mark.parametrize("request_type", [ + cloudbuild.DeleteBuildTriggerRequest, + dict, +]) +def test_delete_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_build_trigger(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_build_trigger_rest_required_fields(request_type=cloudbuild.DeleteBuildTriggerRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["triggerId"] = 'trigger_id_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_build_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_delete_build_trigger") as pre: + pre.assert_not_called() + pb_message = cloudbuild.DeleteBuildTriggerRequest.pb(cloudbuild.DeleteBuildTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudbuild.DeleteBuildTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + + client.delete_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.DeleteBuildTriggerRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_build_trigger(request) + + +def test_delete_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) + + +def test_delete_build_trigger_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_build_trigger( + cloudbuild.DeleteBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + +def test_delete_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.UpdateBuildTriggerRequest, + dict, +]) +def test_update_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': 
{'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 
'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+    # Mock the transport session so no real HTTP request is made.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = cloudbuild.BuildTrigger(
+            resource_name='resource_name_value',
+            id='id_value',
+            description='description_value',
+            name='name_value',
+            tags=['tags_value'],
+            disabled=True,
+            ignored_files=['ignored_files_value'],
+            included_files=['included_files_value'],
+            filter='filter_value',
+            service_account='service_account_value',
+            autodetect=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = cloudbuild.BuildTrigger.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        # NOTE(review): writes the private `_content` attribute of requests.Response
+        # so that the client-side JSON decoding path is exercised.
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.update_build_trigger(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cloudbuild.BuildTrigger)
+    assert response.resource_name == 'resource_name_value'
+    assert response.id == 'id_value'
+    assert response.description == 'description_value'
+    assert response.name == 'name_value'
+    assert response.tags == ['tags_value']
+    assert response.disabled is True
+    assert response.ignored_files == ['ignored_files_value']
+    assert response.included_files == ['included_files_value']
+    assert response.filter == 'filter_value'
+    assert response.service_account == 'service_account_value'
+
+
+# Checks that the REST transport enforces the required fields (project_id,
+# trigger_id) of UpdateBuildTriggerRequest: defaults are dropped from the
+# JSON form, re-added by _get_unset_required_fields, and explicit values
+# are left untouched.
+def test_update_build_trigger_rest_required_fields(request_type=cloudbuild.UpdateBuildTriggerRequest):
+    transport_class = transports.CloudBuildRestTransport
+
+    request_init = {}
+    request_init["project_id"] = ""
+    request_init["trigger_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_build_trigger._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["projectId"] = 'project_id_value'
+    jsonified_request["triggerId"] = 'trigger_id_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_build_trigger._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "projectId" in jsonified_request
+    assert jsonified_request["projectId"] == 'project_id_value'
+    assert "triggerId" in jsonified_request
+    assert jsonified_request["triggerId"] == 'trigger_id_value'
+
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = cloudbuild.BuildTrigger()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = cloudbuild.BuildTrigger.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_build_trigger(request)
+
+            # Only the enum-encoding query parameter is expected because the
+            # required fields were transcoded into the (mocked) URI/body.
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+# Verifies which fields update_build_trigger reports as required-but-unset
+# when given an empty request.
+def test_update_build_trigger_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class here (no
+    # parentheses), unlike the instantiated form used elsewhere — confirm the
+    # transport accepts a credentials class.
+    transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.update_build_trigger._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("projectId", "triggerId", "trigger", )))
+
+
+# Checks that the pre/post interceptor hooks around update_build_trigger are
+# each invoked exactly once, whether or not a custom interceptor is installed.
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_build_trigger_rest_interceptors(null_interceptor):
+    transport = transports.CloudBuildRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(),
+    )
+    client = CloudBuildClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.CloudBuildRestInterceptor, "post_update_build_trigger") as post, \
+        mock.patch.object(transports.CloudBuildRestInterceptor, "pre_update_build_trigger") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudbuild.UpdateBuildTriggerRequest.pb(cloudbuild.UpdateBuildTriggerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger())
+
+        request = cloudbuild.UpdateBuildTriggerRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloudbuild.BuildTrigger()
+
+        client.update_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+# Verifies that an HTTP 400 from the server surfaces as core_exceptions.BadRequest.
+def test_update_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.UpdateBuildTriggerRequest):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'}
+    # Generated exhaustive payload: one sample value per BuildTrigger field.
+    request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_build_trigger(request)
+
+
+# Verifies the flattened-argument overload builds the request and hits the
+# expected REST path.
+def test_update_build_trigger_rest_flattened():
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = cloudbuild.BuildTrigger()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project_id='project_id_value',
+            trigger_id='trigger_id_value',
+            trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = cloudbuild.BuildTrigger.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.update_build_trigger(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        # args[1] is the URI passed to the mocked session.request call.
+        assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1])
+
+
+# Passing both a request object and flattened fields must raise ValueError.
+def test_update_build_trigger_rest_flattened_error(transport: str = 'rest'):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_build_trigger(
+            cloudbuild.UpdateBuildTriggerRequest(),
+            project_id='project_id_value',
+            trigger_id='trigger_id_value',
+            trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'),
+        )
+
+
+# Smoke test: constructing a REST client must not raise.
+def test_update_build_trigger_rest_error():
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+# Happy-path test for run_build_trigger over REST; the method returns a
+# long-running operation, so the faked response is an Operation message.
+@pytest.mark.parametrize("request_type", [
+    cloudbuild.RunBuildTriggerRequest,
+    dict,
+])
+def test_run_build_trigger_rest(request_type):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'}
+    request_init["source"] = {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.run_build_trigger(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == "operations/spam"
+
+
+# Checks that the REST transport enforces the required fields (project_id,
+# trigger_id) of RunBuildTriggerRequest.
+def test_run_build_trigger_rest_required_fields(request_type=cloudbuild.RunBuildTriggerRequest):
+    transport_class = transports.CloudBuildRestTransport
+
+    request_init = {}
+    request_init["project_id"] = ""
+    request_init["trigger_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_build_trigger._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["projectId"] = 'project_id_value'
+    jsonified_request["triggerId"] = 'trigger_id_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_build_trigger._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+ assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.run_build_trigger(request)
+
+            # Only the enum-encoding query parameter is expected; required
+            # fields were consumed by the (mocked) transcode step.
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+# Verifies which fields run_build_trigger reports as required-but-unset.
+def test_run_build_trigger_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class (no parentheses)
+    # here, unlike the instantiated form used elsewhere — confirm intended.
+    transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.run_build_trigger._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", )))
+
+
+# Checks the pre/post interceptor hooks around run_build_trigger fire exactly
+# once. Operation._set_result_from_operation is patched because the method
+# returns a long-running operation.
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_run_build_trigger_rest_interceptors(null_interceptor):
+    transport = transports.CloudBuildRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(),
+    )
+    client = CloudBuildClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudBuildRestInterceptor, "post_run_build_trigger") as post, \
+        mock.patch.object(transports.CloudBuildRestInterceptor, "pre_run_build_trigger") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudbuild.RunBuildTriggerRequest.pb(cloudbuild.RunBuildTriggerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
+
+        request = cloudbuild.RunBuildTriggerRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.run_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+# Verifies that an HTTP 400 surfaces as core_exceptions.BadRequest.
+def test_run_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.RunBuildTriggerRequest):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'}
+    request_init["source"] = {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.run_build_trigger(request)
+
+
+# Verifies the flattened-argument overload hits the :run REST path.
+def test_run_build_trigger_rest_flattened():
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project_id='project_id_value',
+            trigger_id='trigger_id_value',
+            source=cloudbuild.RepoSource(project_id='project_id_value'),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.run_build_trigger(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}:run" % client.transport._host, args[1])
+
+
+# Passing both a request object and flattened fields must raise ValueError.
+def test_run_build_trigger_rest_flattened_error(transport: str = 'rest'):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.run_build_trigger( + cloudbuild.RunBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + + +def test_run_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ReceiveTriggerWebhookRequest, + dict, +]) +def test_receive_trigger_webhook_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger': 'sample2'} + request_init["body"] = {'content_type': 'content_type_value', 'data': b'data_blob', 'extensions': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ReceiveTriggerWebhookResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ReceiveTriggerWebhookResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.receive_trigger_webhook(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse)
+
+
+# Checks the pre/post interceptor hooks around receive_trigger_webhook fire
+# exactly once, with and without a custom interceptor installed.
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_receive_trigger_webhook_rest_interceptors(null_interceptor):
+    transport = transports.CloudBuildRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(),
+    )
+    client = CloudBuildClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.CloudBuildRestInterceptor, "post_receive_trigger_webhook") as post, \
+        mock.patch.object(transports.CloudBuildRestInterceptor, "pre_receive_trigger_webhook") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudbuild.ReceiveTriggerWebhookRequest.pb(cloudbuild.ReceiveTriggerWebhookRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = cloudbuild.ReceiveTriggerWebhookResponse.to_json(cloudbuild.ReceiveTriggerWebhookResponse())
+
+        request = cloudbuild.ReceiveTriggerWebhookRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloudbuild.ReceiveTriggerWebhookResponse()
+
+        client.receive_trigger_webhook(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+# Verifies that an HTTP 400 surfaces as core_exceptions.BadRequest.
+def test_receive_trigger_webhook_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ReceiveTriggerWebhookRequest):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'trigger': 'sample2'}
+    request_init["body"] = {'content_type': 'content_type_value', 'data': b'data_blob', 'extensions': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.receive_trigger_webhook(request)
+
+
+# Smoke test: constructing a REST client must not raise.
+def test_receive_trigger_webhook_rest_error():
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+# Happy-path test for create_worker_pool over REST; the method returns a
+# long-running operation, so the faked response is an Operation message.
+@pytest.mark.parametrize("request_type", [
+    cloudbuild.CreateWorkerPoolRequest,
+    dict,
+])
+def test_create_worker_pool_rest(request_type):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request_init["worker_pool"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_worker_pool(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_worker_pool_rest_required_fields(request_type=cloudbuild.CreateWorkerPoolRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["worker_pool_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "workerPoolId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workerPoolId" in jsonified_request + assert jsonified_request["workerPoolId"] == request_init["worker_pool_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["workerPoolId"] = 'worker_pool_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("validate_only", "worker_pool_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "workerPoolId" in jsonified_request + assert jsonified_request["workerPoolId"] == 'worker_pool_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_worker_pool(request)
+
+            # workerPoolId is a required query parameter, so it appears in the
+            # params alongside the enum-encoding setting.
+            expected_params = [
+                (
+                    "workerPoolId",
+                    "",
+                ),
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+# Verifies which fields create_worker_pool reports as required-but-unset.
+def test_create_worker_pool_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class (no parentheses)
+    # here, unlike the instantiated form used elsewhere — confirm intended.
+    transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.create_worker_pool._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("validateOnly", "workerPoolId", )) & set(("parent", "workerPool", "workerPoolId", )))
+
+
+# Checks the pre/post interceptor hooks around create_worker_pool fire exactly
+# once; Operation._set_result_from_operation is patched because the method
+# returns a long-running operation.
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_worker_pool_rest_interceptors(null_interceptor):
+    transport = transports.CloudBuildRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(),
+    )
+    client = CloudBuildClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_worker_pool") as post, \
+        mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_worker_pool") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudbuild.CreateWorkerPoolRequest.pb(cloudbuild.CreateWorkerPoolRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
+
+        request = cloudbuild.CreateWorkerPoolRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.create_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+# Verifies that an HTTP 400 surfaces as core_exceptions.BadRequest.
+def test_create_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateWorkerPoolRequest):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request_init["worker_pool"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_worker_pool(request) + + +def test_create_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workerPools" % client.transport._host, args[1]) + + +def test_create_worker_pool_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_worker_pool( + cloudbuild.CreateWorkerPoolRequest(), + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + + +def test_create_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetWorkerPoolRequest, + dict, +]) +def test_get_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.WorkerPool( + name='name_value', + display_name='display_name_value', + uid='uid_value', + state=cloudbuild.WorkerPool.State.CREATING, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_worker_pool(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.WorkerPool) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.state == cloudbuild.WorkerPool.State.CREATING + assert response.etag == 'etag_value' + + +def test_get_worker_pool_rest_required_fields(request_type=cloudbuild.GetWorkerPoolRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.WorkerPool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_worker_pool(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_worker_pool") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_worker_pool") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetWorkerPoolRequest.pb(cloudbuild.GetWorkerPoolRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.WorkerPool.to_json(cloudbuild.WorkerPool()) + + request = cloudbuild.GetWorkerPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.WorkerPool() + + client.get_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetWorkerPoolRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_worker_pool(request) + + +def test_get_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.WorkerPool() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) + + +def test_get_worker_pool_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_worker_pool( + cloudbuild.GetWorkerPoolRequest(), + name='name_value', + ) + + +def test_get_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.DeleteWorkerPoolRequest, + dict, +]) +def test_delete_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_worker_pool(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_worker_pool_rest_required_fields(request_type=cloudbuild.DeleteWorkerPoolRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_worker_pool(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_delete_worker_pool") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_delete_worker_pool") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
cloudbuild.DeleteWorkerPoolRequest.pb(cloudbuild.DeleteWorkerPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.DeleteWorkerPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.DeleteWorkerPoolRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_worker_pool(request) + + +def test_delete_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) + + +def test_delete_worker_pool_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_worker_pool( + cloudbuild.DeleteWorkerPoolRequest(), + name='name_value', + ) + + +def test_delete_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.UpdateWorkerPoolRequest, + dict, +]) +def test_update_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} + request_init["worker_pool"] = {'name': 'projects/sample1/locations/sample2/workerPools/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_worker_pool(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_worker_pool_rest_required_fields(request_type=cloudbuild.UpdateWorkerPoolRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_worker_pool(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("workerPool", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_update_worker_pool") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_update_worker_pool") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.UpdateWorkerPoolRequest.pb(cloudbuild.UpdateWorkerPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.UpdateWorkerPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.UpdateWorkerPoolRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} + request_init["worker_pool"] = {'name': 'projects/sample1/locations/sample2/workerPools/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_worker_pool(request) + + +def test_update_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) + + +def test_update_worker_pool_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_worker_pool( + cloudbuild.UpdateWorkerPoolRequest(), + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListWorkerPoolsRequest, + dict, +]) +def test_list_worker_pools_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListWorkerPoolsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_worker_pools(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkerPoolsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_worker_pools_rest_required_fields(request_type=cloudbuild.ListWorkerPoolsRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_worker_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_worker_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListWorkerPoolsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_worker_pools(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_worker_pools_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_worker_pools._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_worker_pools_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_worker_pools") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_worker_pools") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListWorkerPoolsRequest.pb(cloudbuild.ListWorkerPoolsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListWorkerPoolsResponse.to_json(cloudbuild.ListWorkerPoolsResponse()) + + request = cloudbuild.ListWorkerPoolsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListWorkerPoolsResponse() + + client.list_worker_pools(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_worker_pools_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListWorkerPoolsRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_worker_pools(request) + + +def test_list_worker_pools_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListWorkerPoolsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_worker_pools(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workerPools" % client.transport._host, args[1]) + + +def test_list_worker_pools_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_worker_pools( + cloudbuild.ListWorkerPoolsRequest(), + parent='parent_value', + ) + + +def test_list_worker_pools_rest_pager(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudbuild.ListWorkerPoolsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_worker_pools(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.WorkerPool) + for i in results) + + pages = list(client.list_worker_pools(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudBuildClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudBuildGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CloudBuildGrpcTransport, + transports.CloudBuildGrpcAsyncIOTransport, + transports.CloudBuildRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = CloudBuildClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudBuildGrpcTransport, + ) + +def test_cloud_build_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudBuildTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_build_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudBuildTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_build', + 'get_build', + 'list_builds', + 'cancel_build', + 'retry_build', + 'approve_build', + 'create_build_trigger', + 'get_build_trigger', + 'list_build_triggers', + 'delete_build_trigger', + 'update_build_trigger', + 'run_build_trigger', + 'receive_trigger_webhook', + 'create_worker_pool', + 'get_worker_pool', + 'delete_worker_pool', + 'update_worker_pool', + 'list_worker_pools', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_build_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudBuildTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 
'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_cloud_build_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudBuildTransport() + adc.assert_called_once() + + +def test_cloud_build_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudBuildClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudBuildGrpcTransport, + transports.CloudBuildGrpcAsyncIOTransport, + ], +) +def test_cloud_build_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudBuildGrpcTransport, + transports.CloudBuildGrpcAsyncIOTransport, + transports.CloudBuildRestTransport, + ], +) +def test_cloud_build_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudBuildGrpcTransport, grpc_helpers), + (transports.CloudBuildGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_build_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudbuild.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudbuild.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) +def test_cloud_build_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_cloud_build_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.CloudBuildRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_cloud_build_rest_lro_client(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_build_host_no_port(transport_name): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudbuild.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_build_host_with_port(transport_name): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudbuild.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudbuild.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_cloud_build_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudBuildClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudBuildClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_build._session + session2 = client2.transport.create_build._session + assert session1 != session2 + session1 = client1.transport.get_build._session + session2 = client2.transport.get_build._session + assert session1 != session2 + session1 = client1.transport.list_builds._session + session2 = client2.transport.list_builds._session + assert session1 != session2 + session1 = client1.transport.cancel_build._session + session2 = 
client2.transport.cancel_build._session + assert session1 != session2 + session1 = client1.transport.retry_build._session + session2 = client2.transport.retry_build._session + assert session1 != session2 + session1 = client1.transport.approve_build._session + session2 = client2.transport.approve_build._session + assert session1 != session2 + session1 = client1.transport.create_build_trigger._session + session2 = client2.transport.create_build_trigger._session + assert session1 != session2 + session1 = client1.transport.get_build_trigger._session + session2 = client2.transport.get_build_trigger._session + assert session1 != session2 + session1 = client1.transport.list_build_triggers._session + session2 = client2.transport.list_build_triggers._session + assert session1 != session2 + session1 = client1.transport.delete_build_trigger._session + session2 = client2.transport.delete_build_trigger._session + assert session1 != session2 + session1 = client1.transport.update_build_trigger._session + session2 = client2.transport.update_build_trigger._session + assert session1 != session2 + session1 = client1.transport.run_build_trigger._session + session2 = client2.transport.run_build_trigger._session + assert session1 != session2 + session1 = client1.transport.receive_trigger_webhook._session + session2 = client2.transport.receive_trigger_webhook._session + assert session1 != session2 + session1 = client1.transport.create_worker_pool._session + session2 = client2.transport.create_worker_pool._session + assert session1 != session2 + session1 = client1.transport.get_worker_pool._session + session2 = client2.transport.get_worker_pool._session + assert session1 != session2 + session1 = client1.transport.delete_worker_pool._session + session2 = client2.transport.delete_worker_pool._session + assert session1 != session2 + session1 = client1.transport.update_worker_pool._session + session2 = client2.transport.update_worker_pool._session + assert session1 != session2 + session1 = 
client1.transport.list_worker_pools._session + session2 = client2.transport.list_worker_pools._session + assert session1 != session2 +def test_cloud_build_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudBuildGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_build_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudBuildGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) +def test_cloud_build_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) +def test_cloud_build_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cloud_build_grpc_lro_client(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cloud_build_grpc_lro_async_client(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_build_path(): + project = "squid" + build = "clam" + expected = "projects/{project}/builds/{build}".format(project=project, build=build, ) + actual = CloudBuildClient.build_path(project, build) + assert expected == actual + + +def test_parse_build_path(): + expected = { + "project": "whelk", + "build": "octopus", + } + path = CloudBuildClient.build_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_build_path(path) + assert expected == actual + +def test_build_trigger_path(): + project = "oyster" + location = "nudibranch" + trigger = "cuttlefish" + expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + actual = CloudBuildClient.build_trigger_path(project, location, trigger) + assert expected == actual + + +def test_parse_build_trigger_path(): + expected = { + "project": "mussel", + "location": "winkle", + "trigger": "nautilus", + } + path = CloudBuildClient.build_trigger_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_build_trigger_path(path) + assert expected == actual + +def test_crypto_key_path(): + project = "scallop" + location = "abalone" + keyring = "squid" + key = "clam" + expected = "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format(project=project, location=location, keyring=keyring, key=key, ) + actual = CloudBuildClient.crypto_key_path(project, location, keyring, key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "whelk", + "location": "octopus", + "keyring": "oyster", + "key": "nudibranch", + } + path = CloudBuildClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_crypto_key_path(path) + assert expected == actual + +def test_network_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + actual = CloudBuildClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = CloudBuildClient.network_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_network_path(path) + assert expected == actual + +def test_repository_path(): + project = "scallop" + location = "abalone" + connection = "squid" + repository = "clam" + expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) + actual = CloudBuildClient.repository_path(project, location, connection, repository) + assert expected == actual + + +def test_parse_repository_path(): + expected = { + "project": "whelk", + "location": "octopus", + "connection": "oyster", + "repository": "nudibranch", + } + path = CloudBuildClient.repository_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_repository_path(path) + assert expected == actual + +def test_secret_version_path(): + project = "cuttlefish" + secret = "mussel" + version = "winkle" + expected = "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) + actual = CloudBuildClient.secret_version_path(project, secret, version) + assert expected == actual + + +def test_parse_secret_version_path(): + expected = { + "project": "nautilus", + "secret": "scallop", + "version": "abalone", + } + path = CloudBuildClient.secret_version_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_secret_version_path(path) + assert expected == actual + +def test_service_account_path(): + project = "squid" + service_account = "clam" + expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + actual = CloudBuildClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "whelk", + "service_account": "octopus", + } + path = CloudBuildClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_service_account_path(path) + assert expected == actual + +def test_subscription_path(): + project = "oyster" + subscription = "nudibranch" + expected = "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) + actual = CloudBuildClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "cuttlefish", + "subscription": "mussel", + } + path = CloudBuildClient.subscription_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_subscription_path(path) + assert expected == actual + +def test_topic_path(): + project = "winkle" + topic = "nautilus" + expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + actual = CloudBuildClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "scallop", + "topic": "abalone", + } + path = CloudBuildClient.topic_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_topic_path(path) + assert expected == actual + +def test_worker_pool_path(): + project = "squid" + location = "clam" + worker_pool = "whelk" + expected = "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(project=project, location=location, worker_pool=worker_pool, ) + actual = CloudBuildClient.worker_pool_path(project, location, worker_pool) + assert expected == actual + + +def test_parse_worker_pool_path(): + expected = { + "project": "octopus", + "location": "oyster", + "worker_pool": "nudibranch", + } + path = CloudBuildClient.worker_pool_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_worker_pool_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudBuildClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudBuildClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudBuildClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudBuildClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudBuildClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudBuildClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = CloudBuildClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudBuildClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudBuildClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudBuildClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudBuildTransport, '_prep_wrapped_messages') as prep: + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudBuildTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudBuildClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc new file mode 100644 index 00000000..a0cf72db --- /dev/null +++ b/owl-bot-staging/v2/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/devtools/cloudbuild/__init__.py + google/cloud/devtools/cloudbuild/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v2/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in new file mode 100644 index 00000000..6f731ec0 --- /dev/null +++ b/owl-bot-staging/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/devtools/cloudbuild *.py +recursive-include google/cloud/devtools/cloudbuild_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst new file mode 100644 index 00000000..c788a1b3 --- /dev/null +++ b/owl-bot-staging/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Devtools Cloudbuild API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Devtools Cloudbuild API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. 
`virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst new file mode 100644 index 00000000..f4d9c5e2 --- /dev/null +++ b/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst @@ -0,0 +1,10 @@ +RepositoryManager +----------------------------------- + +.. automodule:: google.cloud.devtools.cloudbuild_v2.services.repository_manager + :members: + :inherited-members: + +.. automodule:: google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst new file mode 100644 index 00000000..c055be1a --- /dev/null +++ b/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Devtools Cloudbuild v2 API +==================================================== +.. toctree:: + :maxdepth: 2 + + repository_manager diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst new file mode 100644 index 00000000..2148aa78 --- /dev/null +++ b/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Devtools Cloudbuild v2 API +================================================= + +.. 
automodule:: google.cloud.devtools.cloudbuild_v2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py new file mode 100644 index 00000000..4bd8e2dd --- /dev/null +++ b/owl-bot-staging/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-build documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-build" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Devtools Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-build-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-build.tex", + u"google-cloud-build Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-build", + u"Google Cloud Devtools Cloudbuild Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-build", + u"google-cloud-build Documentation", + author, + "google-cloud-build", + "GAPIC library for Google Cloud Devtools Cloudbuild API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst new file mode 100644 index 00000000..476758ee --- /dev/null +++ b/owl-bot-staging/v2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + cloudbuild_v2/services + cloudbuild_v2/types diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py new file mode 100644 index 00000000..47a5d13c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.devtools.cloudbuild import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.devtools.cloudbuild_v2.services.repository_manager.client import RepositoryManagerClient +from google.cloud.devtools.cloudbuild_v2.services.repository_manager.async_client import RepositoryManagerAsyncClient + +from google.cloud.devtools.cloudbuild_v2.types.cloudbuild import OperationMetadata +from google.cloud.devtools.cloudbuild_v2.types.cloudbuild import RunWorkflowCustomOperationMetadata +from google.cloud.devtools.cloudbuild_v2.types.repositories import BatchCreateRepositoriesRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import BatchCreateRepositoriesResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import Connection +from google.cloud.devtools.cloudbuild_v2.types.repositories import CreateConnectionRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import CreateRepositoryRequest +from 
google.cloud.devtools.cloudbuild_v2.types.repositories import DeleteConnectionRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import DeleteRepositoryRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchGitRefsRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchGitRefsResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchLinkableRepositoriesRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchLinkableRepositoriesResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadTokenRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadTokenResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadWriteTokenRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadWriteTokenResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import GetConnectionRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import GetRepositoryRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import GitHubConfig +from google.cloud.devtools.cloudbuild_v2.types.repositories import GitHubEnterpriseConfig +from google.cloud.devtools.cloudbuild_v2.types.repositories import GitLabConfig +from google.cloud.devtools.cloudbuild_v2.types.repositories import InstallationState +from google.cloud.devtools.cloudbuild_v2.types.repositories import ListConnectionsRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import ListConnectionsResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import ListRepositoriesRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import ListRepositoriesResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import OAuthCredential +from google.cloud.devtools.cloudbuild_v2.types.repositories import ProcessWebhookRequest +from 
google.cloud.devtools.cloudbuild_v2.types.repositories import Repository +from google.cloud.devtools.cloudbuild_v2.types.repositories import ServiceDirectoryConfig +from google.cloud.devtools.cloudbuild_v2.types.repositories import UpdateConnectionRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import UserCredential + +__all__ = ('RepositoryManagerClient', + 'RepositoryManagerAsyncClient', + 'OperationMetadata', + 'RunWorkflowCustomOperationMetadata', + 'BatchCreateRepositoriesRequest', + 'BatchCreateRepositoriesResponse', + 'Connection', + 'CreateConnectionRequest', + 'CreateRepositoryRequest', + 'DeleteConnectionRequest', + 'DeleteRepositoryRequest', + 'FetchGitRefsRequest', + 'FetchGitRefsResponse', + 'FetchLinkableRepositoriesRequest', + 'FetchLinkableRepositoriesResponse', + 'FetchReadTokenRequest', + 'FetchReadTokenResponse', + 'FetchReadWriteTokenRequest', + 'FetchReadWriteTokenResponse', + 'GetConnectionRequest', + 'GetRepositoryRequest', + 'GitHubConfig', + 'GitHubEnterpriseConfig', + 'GitLabConfig', + 'InstallationState', + 'ListConnectionsRequest', + 'ListConnectionsResponse', + 'ListRepositoriesRequest', + 'ListRepositoriesResponse', + 'OAuthCredential', + 'ProcessWebhookRequest', + 'Repository', + 'ServiceDirectoryConfig', + 'UpdateConnectionRequest', + 'UserCredential', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py new file mode 100644 index 00000000..360a0d13 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed new file mode 100644 index 00000000..6070c14c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py new file mode 100644 index 00000000..6745dc72 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.repository_manager import RepositoryManagerClient +from .services.repository_manager import RepositoryManagerAsyncClient + +from .types.cloudbuild import OperationMetadata +from .types.cloudbuild import RunWorkflowCustomOperationMetadata +from .types.repositories import BatchCreateRepositoriesRequest +from .types.repositories import BatchCreateRepositoriesResponse +from .types.repositories import Connection +from .types.repositories import CreateConnectionRequest +from .types.repositories import CreateRepositoryRequest +from .types.repositories import DeleteConnectionRequest +from .types.repositories import DeleteRepositoryRequest +from .types.repositories import FetchGitRefsRequest +from .types.repositories import FetchGitRefsResponse +from .types.repositories import FetchLinkableRepositoriesRequest +from .types.repositories import FetchLinkableRepositoriesResponse +from .types.repositories import FetchReadTokenRequest +from .types.repositories import FetchReadTokenResponse +from .types.repositories import FetchReadWriteTokenRequest +from .types.repositories import FetchReadWriteTokenResponse +from .types.repositories import GetConnectionRequest +from .types.repositories import GetRepositoryRequest +from .types.repositories import GitHubConfig +from .types.repositories import GitHubEnterpriseConfig +from .types.repositories import GitLabConfig +from .types.repositories import InstallationState +from .types.repositories import ListConnectionsRequest +from .types.repositories import ListConnectionsResponse +from .types.repositories import ListRepositoriesRequest +from .types.repositories import ListRepositoriesResponse +from .types.repositories import OAuthCredential +from .types.repositories import ProcessWebhookRequest +from .types.repositories import Repository +from .types.repositories import ServiceDirectoryConfig 
+from .types.repositories import UpdateConnectionRequest +from .types.repositories import UserCredential + +__all__ = ( + 'RepositoryManagerAsyncClient', +'BatchCreateRepositoriesRequest', +'BatchCreateRepositoriesResponse', +'Connection', +'CreateConnectionRequest', +'CreateRepositoryRequest', +'DeleteConnectionRequest', +'DeleteRepositoryRequest', +'FetchGitRefsRequest', +'FetchGitRefsResponse', +'FetchLinkableRepositoriesRequest', +'FetchLinkableRepositoriesResponse', +'FetchReadTokenRequest', +'FetchReadTokenResponse', +'FetchReadWriteTokenRequest', +'FetchReadWriteTokenResponse', +'GetConnectionRequest', +'GetRepositoryRequest', +'GitHubConfig', +'GitHubEnterpriseConfig', +'GitLabConfig', +'InstallationState', +'ListConnectionsRequest', +'ListConnectionsResponse', +'ListRepositoriesRequest', +'ListRepositoriesResponse', +'OAuthCredential', +'OperationMetadata', +'ProcessWebhookRequest', +'Repository', +'RepositoryManagerClient', +'RunWorkflowCustomOperationMetadata', +'ServiceDirectoryConfig', +'UpdateConnectionRequest', +'UserCredential', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json new file mode 100644 index 00000000..2e77ddd0 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json @@ -0,0 +1,238 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.devtools.cloudbuild_v2", + "protoPackage": "google.devtools.cloudbuild.v2", + "schema": "1.0", + "services": { + "RepositoryManager": { + "clients": { + "grpc": { + "libraryClient": "RepositoryManagerClient", + "rpcs": { + "BatchCreateRepositories": { + "methods": [ + "batch_create_repositories" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateRepository": { + "methods": [ + "create_repository" + ] + }, + 
"DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteRepository": { + "methods": [ + "delete_repository" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableRepositories": { + "methods": [ + "fetch_linkable_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetRepository": { + "methods": [ + "get_repository" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListRepositories": { + "methods": [ + "list_repositories" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + }, + "grpc-async": { + "libraryClient": "RepositoryManagerAsyncClient", + "rpcs": { + "BatchCreateRepositories": { + "methods": [ + "batch_create_repositories" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateRepository": { + "methods": [ + "create_repository" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteRepository": { + "methods": [ + "delete_repository" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableRepositories": { + "methods": [ + "fetch_linkable_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetRepository": { + "methods": [ + "get_repository" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListRepositories": { + "methods": [ + "list_repositories" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + }, + "rest": { + "libraryClient": "RepositoryManagerClient", + "rpcs": { + "BatchCreateRepositories": { + "methods": [ + 
"batch_create_repositories" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateRepository": { + "methods": [ + "create_repository" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteRepository": { + "methods": [ + "delete_repository" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableRepositories": { + "methods": [ + "fetch_linkable_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetRepository": { + "methods": [ + "get_repository" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListRepositories": { + "methods": [ + "list_repositories" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py new file mode 100644 index 00000000..360a0d13 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed new file mode 100644 index 00000000..6070c14c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py new file mode 100644 index 00000000..89a37dc9 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py new file mode 100644 index 00000000..4477dbda --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RepositoryManagerClient +from .async_client import RepositoryManagerAsyncClient + +__all__ = ( + 'RepositoryManagerClient', + 'RepositoryManagerAsyncClient', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py new file mode 100644 index 00000000..f0355efe --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py @@ -0,0 +1,2257 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers +from google.cloud.devtools.cloudbuild_v2.types import cloudbuild +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport +from .client import RepositoryManagerClient + + +class RepositoryManagerAsyncClient: + """Manages connections to source code repositories.""" + + _client: RepositoryManagerClient + + DEFAULT_ENDPOINT = RepositoryManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = RepositoryManagerClient.DEFAULT_MTLS_ENDPOINT + + connection_path = 
staticmethod(RepositoryManagerClient.connection_path) + parse_connection_path = staticmethod(RepositoryManagerClient.parse_connection_path) + repository_path = staticmethod(RepositoryManagerClient.repository_path) + parse_repository_path = staticmethod(RepositoryManagerClient.parse_repository_path) + secret_version_path = staticmethod(RepositoryManagerClient.secret_version_path) + parse_secret_version_path = staticmethod(RepositoryManagerClient.parse_secret_version_path) + service_path = staticmethod(RepositoryManagerClient.service_path) + parse_service_path = staticmethod(RepositoryManagerClient.parse_service_path) + common_billing_account_path = staticmethod(RepositoryManagerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(RepositoryManagerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(RepositoryManagerClient.common_folder_path) + parse_common_folder_path = staticmethod(RepositoryManagerClient.parse_common_folder_path) + common_organization_path = staticmethod(RepositoryManagerClient.common_organization_path) + parse_common_organization_path = staticmethod(RepositoryManagerClient.parse_common_organization_path) + common_project_path = staticmethod(RepositoryManagerClient.common_project_path) + parse_common_project_path = staticmethod(RepositoryManagerClient.parse_common_project_path) + common_location_path = staticmethod(RepositoryManagerClient.common_location_path) + parse_common_location_path = staticmethod(RepositoryManagerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RepositoryManagerAsyncClient: The constructed client. 
+ """ + return RepositoryManagerClient.from_service_account_info.__func__(RepositoryManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RepositoryManagerAsyncClient: The constructed client. + """ + return RepositoryManagerClient.from_service_account_file.__func__(RepositoryManagerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return RepositoryManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> RepositoryManagerTransport: + """Returns the transport used by the client instance. + + Returns: + RepositoryManagerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(RepositoryManagerClient).get_transport_class, type(RepositoryManagerClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, RepositoryManagerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the repository manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.RepositoryManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = RepositoryManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_connection(self, + request: Optional[Union[repositories.CreateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection: Optional[repositories.Connection] = None, + connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_create_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest, dict]]): + The request object. Message for creating a Connection + parent (:class:`str`): + Required. Project and location where the connection will + be created. Format: ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (:class:`google.cloud.devtools.cloudbuild_v2.types.Connection`): + Required. The Connection to create. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_id (:class:`str`): + Required. The ID to use for the Connection, which will + become the final component of the Connection's resource + name. Names must be unique per-project per-location. + Allows alphanumeric characters and any of + -._~%!$&'()*+,;=@. + + This corresponds to the ``connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection, connection_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.CreateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + if connection_id is not None: + request.connection_id = connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + repositories.Connection, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_connection(self, + request: Optional[Union[repositories.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.Connection: + r"""Gets details of a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_get_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest, dict]]): + The request object. Message for getting the details of a + Connection. + name (:class:`str`): + Required. The name of the Connection to retrieve. + Format: ``projects/*/locations/*/connections/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.types.Connection: + A connection to a SCM like GitHub, + GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.GetConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_connection, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_connections(self, + request: Optional[Union[repositories.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsAsyncPager: + r"""Lists Connections in a given project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_list_connections(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest, dict]]): + The request object. Message for requesting list of + Connections. + parent (:class:`str`): + Required. The parent, which owns this collection of + Connections. Format: ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsAsyncPager: + Message for response to listing + Connections. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.ListConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_connections, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_connection(self, + request: Optional[Union[repositories.UpdateConnectionRequest, dict]] = None, + *, + connection: Optional[repositories.Connection] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a single connection. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_update_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.UpdateConnectionRequest( + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest, dict]]): + The request object. Message for updating a Connection. + connection (:class:`google.cloud.devtools.cloudbuild_v2.types.Connection`): + Required. The Connection to update. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to be updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.UpdateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection.name", request.connection.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + repositories.Connection, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_connection(self, + request: Optional[Union[repositories.DeleteConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_delete_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest, dict]]): + The request object. Message for deleting a Connection. + name (:class:`str`): + Required. The name of the Connection to delete. Format: + ``projects/*/locations/*/connections/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.DeleteConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_repository(self, + request: Optional[Union[repositories.CreateRepositoryRequest, dict]] = None, + *, + parent: Optional[str] = None, + repository: Optional[repositories.Repository] = None, + repository_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_create_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + repository = cloudbuild_v2.Repository() + repository.remote_uri = "remote_uri_value" + + request = cloudbuild_v2.CreateRepositoryRequest( + parent="parent_value", + repository=repository, + repository_id="repository_id_value", + ) + + # Make the request + operation = client.create_repository(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest, dict]]): + The request object. Message for creating a Repository. + parent (:class:`str`): + Required. The connection to contain + the repository. If the request is part + of a BatchCreateRepositoriesRequest, + this field should be empty or match the + parent specified there. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + repository (:class:`google.cloud.devtools.cloudbuild_v2.types.Repository`): + Required. The repository to create. + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + repository_id (:class:`str`): + Required. The ID to use for the repository, which will + become the final component of the repository's resource + name. This ID should be unique in the connection. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + + This corresponds to the ``repository_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v2.types.Repository` + A repository associated to a parent connection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, repository, repository_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.CreateRepositoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if repository is not None: + request.repository = repository + if repository_id is not None: + request.repository_id = repository_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_repository, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + repositories.Repository, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + async def batch_create_repositories(self, + request: Optional[Union[repositories.BatchCreateRepositoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + requests: Optional[MutableSequence[repositories.CreateRepositoryRequest]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates multiple repositories inside a connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_batch_create_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + requests = cloudbuild_v2.CreateRepositoryRequest() + requests.parent = "parent_value" + requests.repository.remote_uri = "remote_uri_value" + requests.repository_id = "repository_id_value" + + request = cloudbuild_v2.BatchCreateRepositoriesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_repositories(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest, dict]]): + The request object. Message for creating repositoritories + in batch. + parent (:class:`str`): + Required. The connection to contain all the repositories + being created. Format: + projects/\ */locations/*/connections/\* The parent field + in the CreateRepositoryRequest messages must either be + empty or match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]`): + Required. The request messages + specifying the repositories to create. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesResponse` + Message for response of creating repositories in batch. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.BatchCreateRepositoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_repositories, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + repositories.BatchCreateRepositoriesResponse, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_repository(self, + request: Optional[Union[repositories.GetRepositoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.Repository: + r"""Gets details of a single repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_get_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetRepositoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_repository(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest, dict]]): + The request object. Message for getting the details of a + Repository. + name (:class:`str`): + Required. The name of the Repository to retrieve. + Format: + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.types.Repository: + A repository associated to a parent + connection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.GetRepositoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_repository, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_repositories(self, + request: Optional[Union[repositories.ListRepositoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRepositoriesAsyncPager: + r"""Lists Repositories in a given connection. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_list_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListRepositoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest, dict]]): + The request object. Message for requesting list of + Repositories. + parent (:class:`str`): + Required. The parent, which owns this collection of + Repositories. Format: + ``projects/*/locations/*/connections/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesAsyncPager: + Message for response to listing + Repositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.ListRepositoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_repositories, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRepositoriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_repository(self, + request: Optional[Union[repositories.DeleteRepositoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single repository. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_delete_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteRepositoryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_repository(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest, dict]]): + The request object. Message for deleting a Repository. + name (:class:`str`): + Required. The name of the Repository to delete. Format: + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.DeleteRepositoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_repository, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + async def fetch_read_write_token(self, + request: Optional[Union[repositories.FetchReadWriteTokenRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchReadWriteTokenResponse: + r"""Fetches read/write token of a given repository. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_fetch_read_write_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadWriteTokenRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest, dict]]): + The request object. Message for fetching SCM read/write + token. + repository (:class:`str`): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.FetchReadWriteTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_read_write_token, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_read_token(self, + request: Optional[Union[repositories.FetchReadTokenRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchReadTokenResponse: + r"""Fetches read token of a given repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_fetch_read_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadTokenRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_read_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest, dict]]): + The request object. Message for fetching SCM read token. + repository (:class:`str`): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse: + Message for responding to get read + token. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.FetchReadTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_read_token, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_linkable_repositories(self, + request: Optional[Union[repositories.FetchLinkableRepositoriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchLinkableRepositoriesAsyncPager: + r"""FetchLinkableRepositories get repositories from SCM + that are accessible and could be added to the + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_fetch_linkable_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchLinkableRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest, dict]]): + The request object. Request message for + FetchLinkableRepositories. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesAsyncPager: + Response message for + FetchLinkableRepositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = repositories.FetchLinkableRepositoriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_linkable_repositories, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection", request.connection), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchLinkableRepositoriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_git_refs(self, + request: Optional[Union[repositories.FetchGitRefsRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchGitRefsResponse: + r"""Fetch the list of branches or tags for a given + repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_fetch_git_refs(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchGitRefsRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_git_refs(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest, dict]]): + The request object. Request for fetching git refs + repository (:class:`str`): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse: + Response for fetching git refs + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.FetchGitRefsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_git_refs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "RepositoryManagerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RepositoryManagerAsyncClient", +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py new file mode 100644 index 00000000..79743795 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py @@ -0,0 +1,2445 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers +from google.cloud.devtools.cloudbuild_v2.types import cloudbuild +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import RepositoryManagerGrpcTransport +from .transports.grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport +from .transports.rest import RepositoryManagerRestTransport + + +class 
RepositoryManagerClientMeta(type):
+    """Metaclass for the RepositoryManager client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[RepositoryManagerTransport]]
+    _transport_registry["grpc"] = RepositoryManagerGrpcTransport
+    _transport_registry["grpc_asyncio"] = RepositoryManagerGrpcAsyncIOTransport
+    _transport_registry["rest"] = RepositoryManagerRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[RepositoryManagerTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class RepositoryManagerClient(metaclass=RepositoryManagerClientMeta):
+    """Manages connections to source code repositories."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RepositoryManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RepositoryManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RepositoryManagerTransport: + """Returns the transport used by the client instance. + + Returns: + RepositoryManagerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def connection_path(project: str,location: str,connection: str,) -> str: + """Returns a fully-qualified connection string.""" + return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) + + @staticmethod + def parse_connection_path(path: str) -> Dict[str,str]: + """Parses a connection path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def repository_path(project: str,location: str,connection: str,repository: str,) -> str: + """Returns a fully-qualified repository string.""" + return "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) + + @staticmethod + def parse_repository_path(path: str) -> Dict[str,str]: + """Parses a repository path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)/repositories/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def secret_version_path(project: str,secret: str,version: str,) -> str: + """Returns a fully-qualified secret_version string.""" + return "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) + + @staticmethod + def parse_secret_version_path(path: str) -> Dict[str,str]: + """Parses a secret_version path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_path(project: str,location: str,namespace: str,service: str,) -> str: + """Returns a fully-qualified service string.""" + return 
"projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format(project=project, location=location, namespace=namespace, service=service, ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str,str]: + """Parses a service path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/namespaces/(?P.+?)/services/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project 
path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RepositoryManagerTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the repository manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RepositoryManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RepositoryManagerTransport): + # transport is a RepositoryManagerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_connection(self, + request: Optional[Union[repositories.CreateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection: Optional[repositories.Connection] = None, + connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_create_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest, dict]): + The request object. Message for creating a Connection + parent (str): + Required. Project and location where the connection will + be created. Format: ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (google.cloud.devtools.cloudbuild_v2.types.Connection): + Required. The Connection to create. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_id (str): + Required. The ID to use for the Connection, which will + become the final component of the Connection's resource + name. Names must be unique per-project per-location. + Allows alphanumeric characters and any of + -._~%!$&'()*+,;=@. + + This corresponds to the ``connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection, connection_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.CreateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.CreateConnectionRequest): + request = repositories.CreateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + if connection_id is not None: + request.connection_id = connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + repositories.Connection, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_connection(self, + request: Optional[Union[repositories.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.Connection: + r"""Gets details of a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_get_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest, dict]): + The request object. Message for getting the details of a + Connection. + name (str): + Required. The name of the Connection to retrieve. + Format: ``projects/*/locations/*/connections/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.Connection: + A connection to a SCM like GitHub, + GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.GetConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.GetConnectionRequest): + request = repositories.GetConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_connections(self, + request: Optional[Union[repositories.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsPager: + r"""Lists Connections in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_list_connections(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest, dict]): + The request object. Message for requesting list of + Connections. + parent (str): + Required. The parent, which owns this collection of + Connections. Format: ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsPager: + Message for response to listing + Connections. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.ListConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.ListConnectionsRequest): + request = repositories.ListConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_connection(self, + request: Optional[Union[repositories.UpdateConnectionRequest, dict]] = None, + *, + connection: Optional[repositories.Connection] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_update_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.UpdateConnectionRequest( + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest, dict]): + The request object. Message for updating a Connection. + connection (google.cloud.devtools.cloudbuild_v2.types.Connection): + Required. The Connection to update. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.UpdateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.UpdateConnectionRequest): + request = repositories.UpdateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection.name", request.connection.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + repositories.Connection, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_connection(self, + request: Optional[Union[repositories.DeleteConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_delete_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest, dict]): + The request object. Message for deleting a Connection. + name (str): + Required. The name of the Connection to delete. Format: + ``projects/*/locations/*/connections/*``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.DeleteConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.DeleteConnectionRequest): + request = repositories.DeleteConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_repository(self, + request: Optional[Union[repositories.CreateRepositoryRequest, dict]] = None, + *, + parent: Optional[str] = None, + repository: Optional[repositories.Repository] = None, + repository_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_create_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + repository = cloudbuild_v2.Repository() + repository.remote_uri = "remote_uri_value" + + request = cloudbuild_v2.CreateRepositoryRequest( + parent="parent_value", + repository=repository, + repository_id="repository_id_value", + ) + + # Make the request + operation = client.create_repository(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest, dict]): + The request object. Message for creating a Repository. + parent (str): + Required. The connection to contain + the repository. If the request is part + of a BatchCreateRepositoriesRequest, + this field should be empty or match the + parent specified there. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + repository (google.cloud.devtools.cloudbuild_v2.types.Repository): + Required. The repository to create. + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + repository_id (str): + Required. The ID to use for the repository, which will + become the final component of the repository's resource + name. This ID should be unique in the connection. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + + This corresponds to the ``repository_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v2.types.Repository` + A repository associated to a parent connection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, repository, repository_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.CreateRepositoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.CreateRepositoryRequest): + request = repositories.CreateRepositoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if repository is not None: + request.repository = repository + if repository_id is not None: + request.repository_id = repository_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_repository] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + repositories.Repository, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def batch_create_repositories(self, + request: Optional[Union[repositories.BatchCreateRepositoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + requests: Optional[MutableSequence[repositories.CreateRepositoryRequest]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates multiple repositories inside a connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_batch_create_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + requests = cloudbuild_v2.CreateRepositoryRequest() + requests.parent = "parent_value" + requests.repository.remote_uri = "remote_uri_value" + requests.repository_id = "repository_id_value" + + request = cloudbuild_v2.BatchCreateRepositoriesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_repositories(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest, dict]): + The request object. Message for creating repositoritories + in batch. + parent (str): + Required. The connection to contain all the repositories + being created. Format: + projects/\ */locations/*/connections/\* The parent field + in the CreateRepositoryRequest messages must either be + empty or match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]): + Required. The request messages + specifying the repositories to create. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesResponse` + Message for response of creating repositories in batch. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.BatchCreateRepositoriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.BatchCreateRepositoriesRequest): + request = repositories.BatchCreateRepositoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_repositories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + repositories.BatchCreateRepositoriesResponse, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_repository(self, + request: Optional[Union[repositories.GetRepositoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.Repository: + r"""Gets details of a single repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_get_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetRepositoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_repository(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest, dict]): + The request object. Message for getting the details of a + Repository. + name (str): + Required. The name of the Repository to retrieve. + Format: + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.Repository: + A repository associated to a parent + connection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.GetRepositoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.GetRepositoryRequest): + request = repositories.GetRepositoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_repository] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_repositories(self, + request: Optional[Union[repositories.ListRepositoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRepositoriesPager: + r"""Lists Repositories in a given connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_list_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListRepositoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest, dict]): + The request object. Message for requesting list of + Repositories. + parent (str): + Required. The parent, which owns this collection of + Repositories. Format: + ``projects/*/locations/*/connections/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesPager: + Message for response to listing + Repositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.ListRepositoriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.ListRepositoriesRequest): + request = repositories.ListRepositoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_repositories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRepositoriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_repository(self, + request: Optional[Union[repositories.DeleteRepositoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_delete_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteRepositoryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_repository(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest, dict]): + The request object. Message for deleting a Repository. + name (str): + Required. The name of the Repository to delete. Format: + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.DeleteRepositoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.DeleteRepositoryRequest): + request = repositories.DeleteRepositoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_repository] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def fetch_read_write_token(self, + request: Optional[Union[repositories.FetchReadWriteTokenRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchReadWriteTokenResponse: + r"""Fetches read/write token of a given repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_fetch_read_write_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadWriteTokenRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest, dict]): + The request object. Message for fetching SCM read/write + token. + repository (str): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.FetchReadWriteTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.FetchReadWriteTokenRequest): + request = repositories.FetchReadWriteTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_read_write_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def fetch_read_token(self, + request: Optional[Union[repositories.FetchReadTokenRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchReadTokenResponse: + r"""Fetches read token of a given repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_fetch_read_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadTokenRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_read_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest, dict]): + The request object. Message for fetching SCM read token. + repository (str): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse: + Message for responding to get read + token. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.FetchReadTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.FetchReadTokenRequest): + request = repositories.FetchReadTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_read_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def fetch_linkable_repositories(self, + request: Optional[Union[repositories.FetchLinkableRepositoriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchLinkableRepositoriesPager: + r"""FetchLinkableRepositories get repositories from SCM + that are accessible and could be added to the + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_fetch_linkable_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchLinkableRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest, dict]): + The request object. Request message for + FetchLinkableRepositories. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesPager: + Response message for + FetchLinkableRepositories. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a repositories.FetchLinkableRepositoriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.FetchLinkableRepositoriesRequest): + request = repositories.FetchLinkableRepositoriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_linkable_repositories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection", request.connection), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchLinkableRepositoriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_git_refs(self, + request: Optional[Union[repositories.FetchGitRefsRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchGitRefsResponse: + r"""Fetch the list of branches or tags for a given + repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_fetch_git_refs(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchGitRefsRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_git_refs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest, dict]): + The request object. Request for fetching git refs + repository (str): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse: + Response for fetching git refs + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.FetchGitRefsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, repositories.FetchGitRefsRequest): + request = repositories.FetchGitRefsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_git_refs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RepositoryManagerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RepositoryManagerClient", +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py new file mode 100644 index 00000000..1ae879b6 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.devtools.cloudbuild_v2.types import repositories + + +class ListConnectionsPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., repositories.ListConnectionsResponse], + request: repositories.ListConnectionsRequest, + response: repositories.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = repositories.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[repositories.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[repositories.Connection]: + for page in self.pages: + yield from page.connections + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConnectionsAsyncPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connections`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[repositories.ListConnectionsResponse]], + request: repositories.ListConnectionsRequest, + response: repositories.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = repositories.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[repositories.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[repositories.Connection]: + async def async_generator(): + async for page in self.pages: + for response in page.connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListRepositoriesPager: + """A pager for iterating through ``list_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``repositories`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRepositories`` requests and continue to iterate + through the ``repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., repositories.ListRepositoriesResponse], + request: repositories.ListRepositoriesRequest, + response: repositories.ListRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = repositories.ListRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[repositories.ListRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[repositories.Repository]: + for page in self.pages: + yield from page.repositories + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListRepositoriesAsyncPager: + """A pager for iterating through ``list_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``repositories`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRepositories`` requests and continue to iterate + through the ``repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[repositories.ListRepositoriesResponse]], + request: repositories.ListRepositoriesRequest, + response: repositories.ListRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = repositories.ListRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[repositories.ListRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[repositories.Repository]: + async def async_generator(): + async for page in self.pages: + for response in page.repositories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class FetchLinkableRepositoriesPager: + """A pager for iterating through ``fetch_linkable_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``repositories`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``FetchLinkableRepositories`` requests and continue to iterate + through the ``repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., repositories.FetchLinkableRepositoriesResponse], + request: repositories.FetchLinkableRepositoriesRequest, + response: repositories.FetchLinkableRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = repositories.FetchLinkableRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[repositories.FetchLinkableRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[repositories.Repository]: + for page in self.pages: + yield from page.repositories + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class FetchLinkableRepositoriesAsyncPager: + """A pager for iterating through ``fetch_linkable_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``repositories`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchLinkableRepositories`` requests and continue to iterate + through the ``repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[repositories.FetchLinkableRepositoriesResponse]], + request: repositories.FetchLinkableRepositoriesRequest, + response: repositories.FetchLinkableRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = repositories.FetchLinkableRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[repositories.FetchLinkableRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[repositories.Repository]: + async def async_generator(): + async for page in self.pages: + for response in page.repositories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py new file mode 100644 index 00000000..b912a799 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RepositoryManagerTransport +from .grpc import RepositoryManagerGrpcTransport +from .grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport +from .rest import RepositoryManagerRestTransport +from .rest import RepositoryManagerRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RepositoryManagerTransport]] +_transport_registry['grpc'] = RepositoryManagerGrpcTransport +_transport_registry['grpc_asyncio'] = RepositoryManagerGrpcAsyncIOTransport +_transport_registry['rest'] = RepositoryManagerRestTransport + +__all__ = ( + 'RepositoryManagerTransport', + 'RepositoryManagerGrpcTransport', + 'RepositoryManagerGrpcAsyncIOTransport', + 'RepositoryManagerRestTransport', + 'RepositoryManagerRestInterceptor', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py new file mode 100644 index 00000000..28b1d7a8 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py @@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RepositoryManagerTransport(abc.ABC): + """Abstract transport class for RepositoryManager.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'cloudbuild.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] 
= False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
        # --- tail of RepositoryManagerTransport.__init__ (abstract base transport) ---
        # GDCH (Google Distributed Cloud Hosted) credentials need an explicit
        # audience; fall back to the API host when the caller gave none.
        if hasattr(credentials, "with_gdch_audience"):
            credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        # The hasattr guard tolerates older google-auth releases that predate the method.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        """Precompute the gapic_v1-wrapped RPC methods.

        Each transport RPC callable is wrapped with its generator-assigned
        retry policy and default timeout so the client layer can dispatch
        through ``self._wrapped_methods`` without re-deciding policy per call.
        Only idempotent reads (get/list/fetch) get a retry on UNAVAILABLE;
        mutations are wrapped with a timeout only.

        Args:
            client_info: ``gapic_v1.client_info.ClientInfo`` used to build the
                ``x-goog-api-client`` user-agent metadata for every RPC.
        """
        # Precompute the wrapped methods.
        self._wrapped_methods = {
            self.create_connection: gapic_v1.method.wrap_method(
                self.create_connection,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_connection: gapic_v1.method.wrap_method(
                self.get_connection,
                # Exponential backoff: 1s initial, x1.3, capped at 10s,
                # retrying only transient UNAVAILABLE errors for up to 60s.
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_connections: gapic_v1.method.wrap_method(
                self.list_connections,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_connection: gapic_v1.method.wrap_method(
                self.update_connection,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.delete_connection: gapic_v1.method.wrap_method(
                self.delete_connection,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.create_repository: gapic_v1.method.wrap_method(
                self.create_repository,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.batch_create_repositories: gapic_v1.method.wrap_method(
                self.batch_create_repositories,
                # No default timeout: batch creation duration is unbounded.
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_repository: gapic_v1.method.wrap_method(
                self.get_repository,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_repositories: gapic_v1.method.wrap_method(
                self.list_repositories,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.delete_repository: gapic_v1.method.wrap_method(
                self.delete_repository,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.fetch_read_write_token: gapic_v1.method.wrap_method(
                self.fetch_read_write_token,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.fetch_read_token: gapic_v1.method.wrap_method(
                self.fetch_read_token,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.fetch_linkable_repositories: gapic_v1.method.wrap_method(
                self.fetch_linkable_repositories,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.fetch_git_refs: gapic_v1.method.wrap_method(
                self.fetch_git_refs,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    # Each RPC below is an abstract property returning the transport's callable
    # for that method. Concrete subclasses (grpc / grpc_asyncio / rest) override
    # these; the Union return types cover both sync and Awaitable variants.

    @property
    def create_connection(self) -> Callable[
            [repositories.CreateConnectionRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get_connection(self) -> Callable[
            [repositories.GetConnectionRequest],
            Union[
                repositories.Connection,
                Awaitable[repositories.Connection]
            ]]:
        raise NotImplementedError()

    @property
    def list_connections(self) -> Callable[
            [repositories.ListConnectionsRequest],
            Union[
                repositories.ListConnectionsResponse,
                Awaitable[repositories.ListConnectionsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def update_connection(self) -> Callable[
            [repositories.UpdateConnectionRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def delete_connection(self) -> Callable[
            [repositories.DeleteConnectionRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def create_repository(self) -> Callable[
            [repositories.CreateRepositoryRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def batch_create_repositories(self) -> Callable[
            [repositories.BatchCreateRepositoriesRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get_repository(self) -> Callable[
            [repositories.GetRepositoryRequest],
            Union[
                repositories.Repository,
                Awaitable[repositories.Repository]
            ]]:
        raise NotImplementedError()

    @property
    def list_repositories(self) -> Callable[
            [repositories.ListRepositoriesRequest],
            Union[
                repositories.ListRepositoriesResponse,
                Awaitable[repositories.ListRepositoriesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def delete_repository(self) -> Callable[
            [repositories.DeleteRepositoryRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def fetch_read_write_token(self) -> Callable[
            [repositories.FetchReadWriteTokenRequest],
            Union[
                repositories.FetchReadWriteTokenResponse,
                Awaitable[repositories.FetchReadWriteTokenResponse]
            ]]:
        raise NotImplementedError()

    @property
    def fetch_read_token(self) -> Callable[
            [repositories.FetchReadTokenRequest],
            Union[
                repositories.FetchReadTokenResponse,
                Awaitable[repositories.FetchReadTokenResponse]
            ]]:
        raise NotImplementedError()

    @property
    def fetch_linkable_repositories(self) -> Callable[
            [repositories.FetchLinkableRepositoriesRequest],
            Union[
                repositories.FetchLinkableRepositoriesResponse,
                Awaitable[repositories.FetchLinkableRepositoriesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def fetch_git_refs(self) -> Callable[
            [repositories.FetchGitRefsRequest],
            Union[
                repositories.FetchGitRefsResponse,
                Awaitable[repositories.FetchGitRefsResponse]
            ]]:
        raise NotImplementedError()

    # Long-running-operations mixin surface.
    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def cancel_operation(
        self,
    ) -> Callable[
        [operations_pb2.CancelOperationRequest],
        None,
    ]:
        raise NotImplementedError()

    # IAM mixin surface.
    @property
    def set_iam_policy(
        self,
    ) -> Callable[
        [iam_policy_pb2.SetIamPolicyRequest],
        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
    ]:
        raise NotImplementedError()

    @property
    def get_iam_policy(
        self,
    ) -> Callable[
        [iam_policy_pb2.GetIamPolicyRequest],
        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
    ]:
        raise NotImplementedError()

    @property
    def test_iam_permissions(
        self,
    ) -> Callable[
        [iam_policy_pb2.TestIamPermissionsRequest],
        Union[
            iam_policy_pb2.TestIamPermissionsResponse,
            Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier, e.g. "grpc" / "grpc_asyncio" / "rest".
        raise NotImplementedError()


__all__ = (
    'RepositoryManagerTransport',
)
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import gapic_v1
import google.auth                         # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore

import grpc  # type: ignore

from google.cloud.devtools.cloudbuild_v2.types import repositories
from google.cloud.location import locations_pb2 # type: ignore
from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
from google.longrunning import operations_pb2
# NOTE(review): duplicate import emitted by the generator; harmless but redundant.
from google.longrunning import operations_pb2 # type: ignore
from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO


class RepositoryManagerGrpcTransport(RepositoryManagerTransport):
    """gRPC backend transport for RepositoryManager.

    Manages connections to source code repositories.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.

    Stubs are created lazily, one per RPC, and cached in ``_stubs``; each
    ``@property`` below returns (creating on first use) the bound callable
    for its method.
    """
    _stubs: Dict[str, Callable]

    def __init__(self, *,
            host: str = 'cloudbuild.googleapis.com',
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            channel: Optional[grpc.Channel] = None,
            api_mtls_endpoint: Optional[str] = None,
            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            # (False — not None — so the base transport skips ADC lookup.)
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(cls,
                       host: str = 'cloudbuild.googleapis.com',
                       credentials: Optional[ga_credentials.Credentials] = None,
                       credentials_file: Optional[str] = None,
                       scopes: Optional[Sequence[str]] = None,
                       quota_project_id: Optional[str] = None,
                       **kwargs) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsClient(
                self.grpc_channel
            )

        # Return the client from cache.
        return self._operations_client

    @property
    def create_connection(self) -> Callable[
            [repositories.CreateConnectionRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the create connection method over gRPC.

        Creates a Connection.

        Returns:
            Callable[[~.CreateConnectionRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_connection' not in self._stubs:
            self._stubs['create_connection'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/CreateConnection',
                request_serializer=repositories.CreateConnectionRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['create_connection']

    @property
    def get_connection(self) -> Callable[
            [repositories.GetConnectionRequest],
            repositories.Connection]:
        r"""Return a callable for the get connection method over gRPC.

        Gets details of a single connection.

        Returns:
            Callable[[~.GetConnectionRequest],
                    ~.Connection]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_connection' not in self._stubs:
            self._stubs['get_connection'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/GetConnection',
                request_serializer=repositories.GetConnectionRequest.serialize,
                response_deserializer=repositories.Connection.deserialize,
            )
        return self._stubs['get_connection']

    @property
    def list_connections(self) -> Callable[
            [repositories.ListConnectionsRequest],
            repositories.ListConnectionsResponse]:
        r"""Return a callable for the list connections method over gRPC.

        Lists Connections in a given project and location.

        Returns:
            Callable[[~.ListConnectionsRequest],
                    ~.ListConnectionsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_connections' not in self._stubs:
            self._stubs['list_connections'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/ListConnections',
                request_serializer=repositories.ListConnectionsRequest.serialize,
                response_deserializer=repositories.ListConnectionsResponse.deserialize,
            )
        return self._stubs['list_connections']

    @property
    def update_connection(self) -> Callable[
            [repositories.UpdateConnectionRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the update connection method over gRPC.

        Updates a single connection.

        Returns:
            Callable[[~.UpdateConnectionRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_connection' not in self._stubs:
            self._stubs['update_connection'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/UpdateConnection',
                request_serializer=repositories.UpdateConnectionRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['update_connection']

    @property
    def delete_connection(self) -> Callable[
            [repositories.DeleteConnectionRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the delete connection method over gRPC.

        Deletes a single connection.

        Returns:
            Callable[[~.DeleteConnectionRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_connection' not in self._stubs:
            self._stubs['delete_connection'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteConnection',
                request_serializer=repositories.DeleteConnectionRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['delete_connection']

    @property
    def create_repository(self) -> Callable[
            [repositories.CreateRepositoryRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the create repository method over gRPC.

        Creates a Repository.

        Returns:
            Callable[[~.CreateRepositoryRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_repository' not in self._stubs:
            self._stubs['create_repository'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/CreateRepository',
                request_serializer=repositories.CreateRepositoryRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['create_repository']

    @property
    def batch_create_repositories(self) -> Callable[
            [repositories.BatchCreateRepositoriesRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the batch create repositories method over gRPC.

        Creates multiple repositories inside a connection.

        Returns:
            Callable[[~.BatchCreateRepositoriesRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'batch_create_repositories' not in self._stubs:
            self._stubs['batch_create_repositories'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/BatchCreateRepositories',
                request_serializer=repositories.BatchCreateRepositoriesRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['batch_create_repositories']

    @property
    def get_repository(self) -> Callable[
            [repositories.GetRepositoryRequest],
            repositories.Repository]:
        r"""Return a callable for the get repository method over gRPC.

        Gets details of a single repository.

        Returns:
            Callable[[~.GetRepositoryRequest],
                    ~.Repository]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_repository' not in self._stubs:
            self._stubs['get_repository'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/GetRepository',
                request_serializer=repositories.GetRepositoryRequest.serialize,
                response_deserializer=repositories.Repository.deserialize,
            )
        return self._stubs['get_repository']

    @property
    def list_repositories(self) -> Callable[
            [repositories.ListRepositoriesRequest],
            repositories.ListRepositoriesResponse]:
        r"""Return a callable for the list repositories method over gRPC.

        Lists Repositories in a given connection.

        Returns:
            Callable[[~.ListRepositoriesRequest],
                    ~.ListRepositoriesResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_repositories' not in self._stubs:
            self._stubs['list_repositories'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/ListRepositories',
                request_serializer=repositories.ListRepositoriesRequest.serialize,
                response_deserializer=repositories.ListRepositoriesResponse.deserialize,
            )
        return self._stubs['list_repositories']

    @property
    def delete_repository(self) -> Callable[
            [repositories.DeleteRepositoryRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the delete repository method over gRPC.

        Deletes a single repository.

        Returns:
            Callable[[~.DeleteRepositoryRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_repository' not in self._stubs:
            self._stubs['delete_repository'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteRepository',
                request_serializer=repositories.DeleteRepositoryRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['delete_repository']

    @property
    def fetch_read_write_token(self) -> Callable[
            [repositories.FetchReadWriteTokenRequest],
            repositories.FetchReadWriteTokenResponse]:
        r"""Return a callable for the fetch read write token method over gRPC.

        Fetches read/write token of a given repository.

        Returns:
            Callable[[~.FetchReadWriteTokenRequest],
                    ~.FetchReadWriteTokenResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'fetch_read_write_token' not in self._stubs:
            self._stubs['fetch_read_write_token'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadWriteToken',
                request_serializer=repositories.FetchReadWriteTokenRequest.serialize,
                response_deserializer=repositories.FetchReadWriteTokenResponse.deserialize,
            )
        return self._stubs['fetch_read_write_token']

    @property
    def fetch_read_token(self) -> Callable[
            [repositories.FetchReadTokenRequest],
            repositories.FetchReadTokenResponse]:
        r"""Return a callable for the fetch read token method over gRPC.

        Fetches read token of a given repository.

        Returns:
            Callable[[~.FetchReadTokenRequest],
                    ~.FetchReadTokenResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'fetch_read_token' not in self._stubs:
            self._stubs['fetch_read_token'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadToken',
                request_serializer=repositories.FetchReadTokenRequest.serialize,
                response_deserializer=repositories.FetchReadTokenResponse.deserialize,
            )
        return self._stubs['fetch_read_token']

    @property
    def fetch_linkable_repositories(self) -> Callable[
            [repositories.FetchLinkableRepositoriesRequest],
            repositories.FetchLinkableRepositoriesResponse]:
        r"""Return a callable for the fetch linkable repositories method over gRPC.

        FetchLinkableRepositories get repositories from SCM
        that are accessible and could be added to the
        connection.

        Returns:
            Callable[[~.FetchLinkableRepositoriesRequest],
                    ~.FetchLinkableRepositoriesResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'fetch_linkable_repositories' not in self._stubs:
            self._stubs['fetch_linkable_repositories'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/FetchLinkableRepositories',
                request_serializer=repositories.FetchLinkableRepositoriesRequest.serialize,
                response_deserializer=repositories.FetchLinkableRepositoriesResponse.deserialize,
            )
        return self._stubs['fetch_linkable_repositories']

    @property
    def fetch_git_refs(self) -> Callable[
            [repositories.FetchGitRefsRequest],
            repositories.FetchGitRefsResponse]:
        r"""Return a callable for the fetch git refs method over gRPC.

        Fetch the list of branches or tags for a given
        repository.

        Returns:
            Callable[[~.FetchGitRefsRequest],
                    ~.FetchGitRefsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'fetch_git_refs' not in self._stubs:
            self._stubs['fetch_git_refs'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v2.RepositoryManager/FetchGitRefs',
                request_serializer=repositories.FetchGitRefsRequest.serialize,
                response_deserializer=repositories.FetchGitRefsResponse.deserialize,
            )
        return self._stubs['fetch_git_refs']

    def close(self):
        """Close the underlying gRPC channel, releasing its resources."""
        self.grpc_channel.close()

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
        r"""Return a callable for the cancel_operation method over gRPC.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "cancel_operation" not in self._stubs:
            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/CancelOperation",
                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["cancel_operation"]

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def set_iam_policy(
        self,
    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
        r"""Return a callable for the set iam policy method over gRPC.
        Sets the IAM access control policy on the specified
        resource. Replaces any existing policy.
        Returns:
            Callable[[~.SetIamPolicyRequest],
                    ~.Policy]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "set_iam_policy" not in self._stubs:
            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
                "/google.iam.v1.IAMPolicy/SetIamPolicy",
                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
                response_deserializer=policy_pb2.Policy.FromString,
            )
        return self._stubs["set_iam_policy"]

    @property
    def get_iam_policy(
        self,
    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
        r"""Return a callable for the get iam policy method over gRPC.
        Gets the IAM access control policy for a resource.
        Returns an empty policy if the resource exists and does
        not have a policy set.
        Returns:
            Callable[[~.GetIamPolicyRequest],
                    ~.Policy]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_iam_policy" not in self._stubs:
            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
                "/google.iam.v1.IAMPolicy/GetIamPolicy",
                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
                response_deserializer=policy_pb2.Policy.FromString,
            )
        return self._stubs["get_iam_policy"]

    @property
    def test_iam_permissions(
        self,
    ) -> Callable[
        [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse
    ]:
        r"""Return a callable for the test iam permissions method over gRPC.
        Tests the specified permissions against the IAM access control
        policy for a resource. If the resource does not exist, this will
        return an empty set of permissions, not a NOT_FOUND error.
        Returns:
            Callable[[~.TestIamPermissionsRequest],
                    ~.TestIamPermissionsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "test_iam_permissions" not in self._stubs:
            self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
                "/google.iam.v1.IAMPolicy/TestIamPermissions",
                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
            )
        return self._stubs["test_iam_permissions"]

    @property
    def kind(self) -> str:
        # Transport identifier used by the client factory.
        return "grpc"


__all__ = (
    'RepositoryManagerGrpcTransport',
)
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO +from .grpc import RepositoryManagerGrpcTransport + + +class RepositoryManagerGrpcAsyncIOTransport(RepositoryManagerTransport): + """gRPC AsyncIO backend transport for RepositoryManager. + + Manages connections to source code repositories. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_connection(self) -> Callable[ + [repositories.CreateConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create connection method over gRPC. + + Creates a Connection. + + Returns: + Callable[[~.CreateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection' not in self._stubs: + self._stubs['create_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/CreateConnection', + request_serializer=repositories.CreateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_connection'] + + @property + def get_connection(self) -> Callable[ + [repositories.GetConnectionRequest], + Awaitable[repositories.Connection]]: + r"""Return a callable for the get connection method over gRPC. + + Gets details of a single connection. + + Returns: + Callable[[~.GetConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_connection' not in self._stubs: + self._stubs['get_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/GetConnection', + request_serializer=repositories.GetConnectionRequest.serialize, + response_deserializer=repositories.Connection.deserialize, + ) + return self._stubs['get_connection'] + + @property + def list_connections(self) -> Callable[ + [repositories.ListConnectionsRequest], + Awaitable[repositories.ListConnectionsResponse]]: + r"""Return a callable for the list connections method over gRPC. + + Lists Connections in a given project and location. + + Returns: + Callable[[~.ListConnectionsRequest], + Awaitable[~.ListConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_connections' not in self._stubs: + self._stubs['list_connections'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/ListConnections', + request_serializer=repositories.ListConnectionsRequest.serialize, + response_deserializer=repositories.ListConnectionsResponse.deserialize, + ) + return self._stubs['list_connections'] + + @property + def update_connection(self) -> Callable[ + [repositories.UpdateConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update connection method over gRPC. + + Updates a single connection. + + Returns: + Callable[[~.UpdateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_connection' not in self._stubs: + self._stubs['update_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/UpdateConnection', + request_serializer=repositories.UpdateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_connection'] + + @property + def delete_connection(self) -> Callable[ + [repositories.DeleteConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete connection method over gRPC. + + Deletes a single connection. + + Returns: + Callable[[~.DeleteConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection' not in self._stubs: + self._stubs['delete_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteConnection', + request_serializer=repositories.DeleteConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_connection'] + + @property + def create_repository(self) -> Callable[ + [repositories.CreateRepositoryRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create repository method over gRPC. + + Creates a Repository. + + Returns: + Callable[[~.CreateRepositoryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_repository' not in self._stubs: + self._stubs['create_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/CreateRepository', + request_serializer=repositories.CreateRepositoryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_repository'] + + @property + def batch_create_repositories(self) -> Callable[ + [repositories.BatchCreateRepositoriesRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the batch create repositories method over gRPC. + + Creates multiple repositories inside a connection. + + Returns: + Callable[[~.BatchCreateRepositoriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_create_repositories' not in self._stubs: + self._stubs['batch_create_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/BatchCreateRepositories', + request_serializer=repositories.BatchCreateRepositoriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['batch_create_repositories'] + + @property + def get_repository(self) -> Callable[ + [repositories.GetRepositoryRequest], + Awaitable[repositories.Repository]]: + r"""Return a callable for the get repository method over gRPC. + + Gets details of a single repository. + + Returns: + Callable[[~.GetRepositoryRequest], + Awaitable[~.Repository]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_repository' not in self._stubs: + self._stubs['get_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/GetRepository', + request_serializer=repositories.GetRepositoryRequest.serialize, + response_deserializer=repositories.Repository.deserialize, + ) + return self._stubs['get_repository'] + + @property + def list_repositories(self) -> Callable[ + [repositories.ListRepositoriesRequest], + Awaitable[repositories.ListRepositoriesResponse]]: + r"""Return a callable for the list repositories method over gRPC. + + Lists Repositories in a given connection. + + Returns: + Callable[[~.ListRepositoriesRequest], + Awaitable[~.ListRepositoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_repositories' not in self._stubs: + self._stubs['list_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/ListRepositories', + request_serializer=repositories.ListRepositoriesRequest.serialize, + response_deserializer=repositories.ListRepositoriesResponse.deserialize, + ) + return self._stubs['list_repositories'] + + @property + def delete_repository(self) -> Callable[ + [repositories.DeleteRepositoryRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete repository method over gRPC. + + Deletes a single repository. + + Returns: + Callable[[~.DeleteRepositoryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_repository' not in self._stubs: + self._stubs['delete_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteRepository', + request_serializer=repositories.DeleteRepositoryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_repository'] + + @property + def fetch_read_write_token(self) -> Callable[ + [repositories.FetchReadWriteTokenRequest], + Awaitable[repositories.FetchReadWriteTokenResponse]]: + r"""Return a callable for the fetch read write token method over gRPC. + + Fetches read/write token of a given repository. + + Returns: + Callable[[~.FetchReadWriteTokenRequest], + Awaitable[~.FetchReadWriteTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_read_write_token' not in self._stubs: + self._stubs['fetch_read_write_token'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadWriteToken', + request_serializer=repositories.FetchReadWriteTokenRequest.serialize, + response_deserializer=repositories.FetchReadWriteTokenResponse.deserialize, + ) + return self._stubs['fetch_read_write_token'] + + @property + def fetch_read_token(self) -> Callable[ + [repositories.FetchReadTokenRequest], + Awaitable[repositories.FetchReadTokenResponse]]: + r"""Return a callable for the fetch read token method over gRPC. + + Fetches read token of a given repository. + + Returns: + Callable[[~.FetchReadTokenRequest], + Awaitable[~.FetchReadTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_read_token' not in self._stubs: + self._stubs['fetch_read_token'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadToken', + request_serializer=repositories.FetchReadTokenRequest.serialize, + response_deserializer=repositories.FetchReadTokenResponse.deserialize, + ) + return self._stubs['fetch_read_token'] + + @property + def fetch_linkable_repositories(self) -> Callable[ + [repositories.FetchLinkableRepositoriesRequest], + Awaitable[repositories.FetchLinkableRepositoriesResponse]]: + r"""Return a callable for the fetch linkable repositories method over gRPC. + + FetchLinkableRepositories get repositories from SCM + that are accessible and could be added to the + connection. + + Returns: + Callable[[~.FetchLinkableRepositoriesRequest], + Awaitable[~.FetchLinkableRepositoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_linkable_repositories' not in self._stubs: + self._stubs['fetch_linkable_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchLinkableRepositories', + request_serializer=repositories.FetchLinkableRepositoriesRequest.serialize, + response_deserializer=repositories.FetchLinkableRepositoriesResponse.deserialize, + ) + return self._stubs['fetch_linkable_repositories'] + + @property + def fetch_git_refs(self) -> Callable[ + [repositories.FetchGitRefsRequest], + Awaitable[repositories.FetchGitRefsResponse]]: + r"""Return a callable for the fetch git refs method over gRPC. + + Fetch the list of branches or tags for a given + repository. 
+ + Returns: + Callable[[~.FetchGitRefsRequest], + Awaitable[~.FetchGitRefsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_git_refs' not in self._stubs: + self._stubs['fetch_git_refs'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchGitRefs', + request_serializer=repositories.FetchGitRefsRequest.serialize, + response_deserializer=repositories.FetchGitRefsResponse.deserialize, + ) + return self._stubs['fetch_git_refs'] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ( + 'RepositoryManagerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py new file mode 100644 index 00000000..365c9861 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py @@ -0,0 +1,2275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.longrunning import operations_pb2 # type: ignore + +from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RepositoryManagerRestInterceptor: + """Interceptor for RepositoryManager. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RepositoryManagerRestTransport. + + .. code-block:: python + class MyCustomRepositoryManagerInterceptor(RepositoryManagerRestInterceptor): + def pre_batch_create_repositories(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_repositories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_repository(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_repository(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_repository(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_repository(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_git_refs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_git_refs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_linkable_repositories(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_fetch_linkable_repositories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_read_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_read_token(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_read_write_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_read_write_token(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_repository(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_repository(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_repositories(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_repositories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_connection(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RepositoryManagerRestTransport(interceptor=MyCustomRepositoryManagerInterceptor()) + client = RepositoryManagerClient(transport=transport) + + + 
""" + def pre_batch_create_repositories(self, request: repositories.BatchCreateRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.BatchCreateRepositoriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_create_repositories + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_batch_create_repositories(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_create_repositories + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_create_connection(self, request: repositories.CreateConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.CreateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_create_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_connection + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_create_repository(self, request: repositories.CreateRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.CreateRepositoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_repository + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. 
+ """ + return request, metadata + + def post_create_repository(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_repository + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_delete_connection(self, request: repositories.DeleteConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.DeleteConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_delete_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_connection + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_delete_repository(self, request: repositories.DeleteRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.DeleteRepositoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_repository + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_delete_repository(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_repository + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. 
+ """ + return response + def pre_fetch_git_refs(self, request: repositories.FetchGitRefsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchGitRefsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_git_refs + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_fetch_git_refs(self, response: repositories.FetchGitRefsResponse) -> repositories.FetchGitRefsResponse: + """Post-rpc interceptor for fetch_git_refs + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_fetch_linkable_repositories(self, request: repositories.FetchLinkableRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchLinkableRepositoriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_linkable_repositories + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_fetch_linkable_repositories(self, response: repositories.FetchLinkableRepositoriesResponse) -> repositories.FetchLinkableRepositoriesResponse: + """Post-rpc interceptor for fetch_linkable_repositories + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_fetch_read_token(self, request: repositories.FetchReadTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchReadTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_read_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. 
+ """ + return request, metadata + + def post_fetch_read_token(self, response: repositories.FetchReadTokenResponse) -> repositories.FetchReadTokenResponse: + """Post-rpc interceptor for fetch_read_token + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_fetch_read_write_token(self, request: repositories.FetchReadWriteTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchReadWriteTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_read_write_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_fetch_read_write_token(self, response: repositories.FetchReadWriteTokenResponse) -> repositories.FetchReadWriteTokenResponse: + """Post-rpc interceptor for fetch_read_write_token + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_get_connection(self, request: repositories.GetConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.GetConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_get_connection(self, response: repositories.Connection) -> repositories.Connection: + """Post-rpc interceptor for get_connection + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. 
+ """ + return response + def pre_get_repository(self, request: repositories.GetRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.GetRepositoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_repository + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_get_repository(self, response: repositories.Repository) -> repositories.Repository: + """Post-rpc interceptor for get_repository + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_list_connections(self, request: repositories.ListConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.ListConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_list_connections(self, response: repositories.ListConnectionsResponse) -> repositories.ListConnectionsResponse: + """Post-rpc interceptor for list_connections + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_list_repositories(self, request: repositories.ListRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.ListRepositoriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_repositories + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. 
+ """ + return request, metadata + + def post_list_repositories(self, response: repositories.ListRepositoriesResponse) -> repositories.ListRepositoriesResponse: + """Post-rpc interceptor for list_repositories + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_update_connection(self, request: repositories.UpdateConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.UpdateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_update_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_connection + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_get_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy( + self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_set_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions( + self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. 
+ """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RepositoryManagerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RepositoryManagerRestInterceptor + + +class RepositoryManagerRestTransport(RepositoryManagerTransport): + """REST backend transport for RepositoryManager. + + Manages connections to source code repositories. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RepositoryManagerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RepositoryManagerRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v2") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _BatchCreateRepositories(RepositoryManagerRestStub): + def __hash__(self): + return hash("BatchCreateRepositories") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.BatchCreateRepositoriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the batch create repositories method over HTTP. + + Args: + request (~.repositories.BatchCreateRepositoriesRequest): + The request object. Message for creating repositoritories + in batch. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories:batchCreate', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_batch_create_repositories(request, metadata) + pb_request = repositories.BatchCreateRepositoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_repositories(resp) + return resp + + class _CreateConnection(RepositoryManagerRestStub): + def __hash__(self): + return hash("CreateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "connectionId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.CreateConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create connection method over HTTP. + + Args: + request (~.repositories.CreateConnectionRequest): + The request object. Message for creating a Connection + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/connections', + 'body': 'connection', + }, + ] + request, metadata = self._interceptor.pre_create_connection(request, metadata) + pb_request = repositories.CreateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_connection(resp) + return resp + + class _CreateRepository(RepositoryManagerRestStub): + def __hash__(self): + return hash("CreateRepository") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "repositoryId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.CreateRepositoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create repository method over HTTP. + + Args: + request (~.repositories.CreateRepositoryRequest): + The request object. Message for creating a Repository. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories', + 'body': 'repository', + }, + ] + request, metadata = self._interceptor.pre_create_repository(request, metadata) + pb_request = repositories.CreateRepositoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_repository(resp) + return resp + + class _DeleteConnection(RepositoryManagerRestStub): + def __hash__(self): + return hash("DeleteConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.DeleteConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete connection method over HTTP. + + Args: + request (~.repositories.DeleteConnectionRequest): + The request object. Message for deleting a Connection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/connections/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_connection(request, metadata) + pb_request = repositories.DeleteConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            # Any non-2xx/3xx status is surfaced as the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response: deserialize the JSON payload directly into
            # a long-running Operation proto, then run the post-call interceptor.
            resp = operations_pb2.Operation()
            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_delete_connection(resp)
            return resp

    class _DeleteRepository(RepositoryManagerRestStub):
        # Generated REST stub for RepositoryManager.DeleteRepository.
        def __hash__(self):
            return hash("DeleteRepository")

        # Query-string defaults injected for required-but-unset request
        # fields; empty for this method, kept for uniformity with the
        # other generated stubs.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Only defaults whose key is absent from message_dict are returned.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: repositories.DeleteRepositoryRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> operations_pb2.Operation:
            r"""Call the delete repository method over HTTP.

            Args:
                request (~.repositories.DeleteRepositoryRequest):
                    The request object. Message for deleting a Repository.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                    long-running operation that is the
                    result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/connections/*/repositories/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_repository(request, metadata) + pb_request = repositories.DeleteRepositoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_repository(resp) + return resp + + class _FetchGitRefs(RepositoryManagerRestStub): + def __hash__(self): + return hash("FetchGitRefs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.FetchGitRefsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.FetchGitRefsResponse: + r"""Call the fetch git refs method over HTTP. + + Args: + request (~.repositories.FetchGitRefsRequest): + The request object. Request for fetching git refs + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.repositories.FetchGitRefsResponse: + Response for fetching git refs + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:fetchGitRefs', + }, + ] + request, metadata = self._interceptor.pre_fetch_git_refs(request, metadata) + pb_request = repositories.FetchGitRefsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            # Any non-2xx/3xx status is surfaced as the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response: parse JSON into the underlying pb message
            # of the wrapper, then let the interceptor post-process the wrapper.
            resp = repositories.FetchGitRefsResponse()
            pb_resp = repositories.FetchGitRefsResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_fetch_git_refs(resp)
            return resp

    class _FetchLinkableRepositories(RepositoryManagerRestStub):
        # Generated REST stub for RepositoryManager.FetchLinkableRepositories.
        def __hash__(self):
            return hash("FetchLinkableRepositories")

        # Query-string defaults injected for required-but-unset request
        # fields; empty for this method.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Only defaults whose key is absent from message_dict are returned.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: repositories.FetchLinkableRepositoriesRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> repositories.FetchLinkableRepositoriesResponse:
            r"""Call the fetch linkable
            repositories method over HTTP.

            Args:
                request (~.repositories.FetchLinkableRepositoriesRequest):
                    The request object. Request message for
                    FetchLinkableRepositories.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.repositories.FetchLinkableRepositoriesResponse:
                    Response message for
                    FetchLinkableRepositories.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{connection=projects/*/locations/*/connections/*}:fetchLinkableRepositories', + }, + ] + request, metadata = self._interceptor.pre_fetch_linkable_repositories(request, metadata) + pb_request = repositories.FetchLinkableRepositoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.FetchLinkableRepositoriesResponse() + pb_resp = repositories.FetchLinkableRepositoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_linkable_repositories(resp) + return resp + + class _FetchReadToken(RepositoryManagerRestStub): + def __hash__(self): + return hash("FetchReadToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.FetchReadTokenRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.FetchReadTokenResponse: + r"""Call the fetch read token method over HTTP. + + Args: + request (~.repositories.FetchReadTokenRequest): + The request object. Message for fetching SCM read token. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.FetchReadTokenResponse: + Message for responding to get read + token. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadToken', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_fetch_read_token(request, metadata) + pb_request = repositories.FetchReadTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.FetchReadTokenResponse() + pb_resp = repositories.FetchReadTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_read_token(resp) + return resp + + class _FetchReadWriteToken(RepositoryManagerRestStub): + def __hash__(self): + return hash("FetchReadWriteToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.FetchReadWriteTokenRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.FetchReadWriteTokenResponse: + r"""Call the fetch read write token method over HTTP. + + Args: + request (~.repositories.FetchReadWriteTokenRequest): + The request object. Message for fetching SCM read/write + token. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadWriteToken', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_fetch_read_write_token(request, metadata) + pb_request = repositories.FetchReadWriteTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.FetchReadWriteTokenResponse() + pb_resp = repositories.FetchReadWriteTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_read_write_token(resp) + return resp + + class _GetConnection(RepositoryManagerRestStub): + def __hash__(self): + return hash("GetConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.GetConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.Connection: + r"""Call the get connection method over HTTP. + + Args: + request (~.repositories.GetConnectionRequest): + The request object. Message for getting the details of a + Connection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.Connection: + A connection to a SCM like GitHub, + GitHub Enterprise, Bitbucket Server or + GitLab. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/connections/*}', + }, + ] + request, metadata = self._interceptor.pre_get_connection(request, metadata) + pb_request = repositories.GetConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            # Any non-2xx/3xx status is surfaced as the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response: parse JSON into the underlying pb message
            # of the wrapper, then let the interceptor post-process the wrapper.
            resp = repositories.Connection()
            pb_resp = repositories.Connection.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_connection(resp)
            return resp

    class _GetRepository(RepositoryManagerRestStub):
        # Generated REST stub for RepositoryManager.GetRepository.
        def __hash__(self):
            return hash("GetRepository")

        # Query-string defaults injected for required-but-unset request
        # fields; empty for this method.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Only defaults whose key is absent from message_dict are returned.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: repositories.GetRepositoryRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> repositories.Repository:
            r"""Call the get repository method over HTTP.

            Args:
                request (~.repositories.GetRepositoryRequest):
                    The request object. Message for getting the details of a
                    Repository.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.repositories.Repository:
                    A repository associated to a parent
                    connection.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/connections/*/repositories/*}', + }, + ] + request, metadata = self._interceptor.pre_get_repository(request, metadata) + pb_request = repositories.GetRepositoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.Repository() + pb_resp = repositories.Repository.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_repository(resp) + return resp + + class _ListConnections(RepositoryManagerRestStub): + def __hash__(self): + return hash("ListConnections") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.ListConnectionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.ListConnectionsResponse: + r"""Call the list connections method over HTTP. + + Args: + request (~.repositories.ListConnectionsRequest): + The request object. Message for requesting list of + Connections. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.ListConnectionsResponse: + Message for response to listing + Connections. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/connections', + }, + ] + request, metadata = self._interceptor.pre_list_connections(request, metadata) + pb_request = repositories.ListConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.ListConnectionsResponse() + pb_resp = repositories.ListConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_connections(resp) + return resp + + class _ListRepositories(RepositoryManagerRestStub): + def __hash__(self): + return hash("ListRepositories") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.ListRepositoriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.ListRepositoriesResponse: + r"""Call the list repositories method over HTTP. + + Args: + request (~.repositories.ListRepositoriesRequest): + The request object. Message for requesting list of + Repositories. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.ListRepositoriesResponse: + Message for response to listing + Repositories. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories', + }, + ] + request, metadata = self._interceptor.pre_list_repositories(request, metadata) + pb_request = repositories.ListRepositoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            # Any non-2xx/3xx status is surfaced as the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response: parse JSON into the underlying pb message
            # of the wrapper, then let the interceptor post-process the wrapper.
            resp = repositories.ListRepositoriesResponse()
            pb_resp = repositories.ListRepositoriesResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list_repositories(resp)
            return resp

    class _UpdateConnection(RepositoryManagerRestStub):
        # Generated REST stub for RepositoryManager.UpdateConnection.
        def __hash__(self):
            return hash("UpdateConnection")

        # Query-string defaults injected for required-but-unset request
        # fields; empty for this method.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Only defaults whose key is absent from message_dict are returned.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: repositories.UpdateConnectionRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> operations_pb2.Operation:
            r"""Call the update connection method over HTTP.

            Args:
                request (~.repositories.UpdateConnectionRequest):
                    The request object. Message for updating a Connection.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                    long-running operation that is the
                    result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{connection.name=projects/*/locations/*/connections/*}', + 'body': 'connection', + }, + ] + request, metadata = self._interceptor.pre_update_connection(request, metadata) + pb_request = repositories.UpdateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_connection(resp) + return resp + + @property + def batch_create_repositories(self) -> Callable[ + [repositories.BatchCreateRepositoriesRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchCreateRepositories(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_connection(self) -> Callable[ + [repositories.CreateConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_repository(self) -> Callable[ + [repositories.CreateRepositoryRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRepository(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_connection(self) -> Callable[ + [repositories.DeleteConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_repository(self) -> Callable[ + [repositories.DeleteRepositoryRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRepository(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_git_refs(self) -> Callable[ + [repositories.FetchGitRefsRequest], + repositories.FetchGitRefsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._FetchGitRefs(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_linkable_repositories(self) -> Callable[ + [repositories.FetchLinkableRepositoriesRequest], + repositories.FetchLinkableRepositoriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchLinkableRepositories(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_read_token(self) -> Callable[ + [repositories.FetchReadTokenRequest], + repositories.FetchReadTokenResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchReadToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_read_write_token(self) -> Callable[ + [repositories.FetchReadWriteTokenRequest], + repositories.FetchReadWriteTokenResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchReadWriteToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection(self) -> Callable[ + [repositories.GetConnectionRequest], + repositories.Connection]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_repository(self) -> Callable[ + [repositories.GetRepositoryRequest], + repositories.Repository]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetRepository(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_connections(self) -> Callable[ + [repositories.ListConnectionsRequest], + repositories.ListConnectionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_repositories(self) -> Callable[ + [repositories.ListRepositoriesRequest], + repositories.ListRepositoriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRepositories(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_connection(self) -> Callable[ + [repositories.UpdateConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(RepositoryManagerRestStub): + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:getIamPolicy', + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(RepositoryManagerRestStub): + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:setIamPolicy', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(RepositoryManagerRestStub): + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:testIamPermissions', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RepositoryManagerRestStub): + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(RepositoryManagerRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RepositoryManagerRestTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py new file mode 100644 index 00000000..1df6a863 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cloudbuild import ( + OperationMetadata, + RunWorkflowCustomOperationMetadata, +) +from .repositories import ( + BatchCreateRepositoriesRequest, + BatchCreateRepositoriesResponse, + Connection, + CreateConnectionRequest, + CreateRepositoryRequest, + DeleteConnectionRequest, + DeleteRepositoryRequest, + FetchGitRefsRequest, + FetchGitRefsResponse, + FetchLinkableRepositoriesRequest, + FetchLinkableRepositoriesResponse, + FetchReadTokenRequest, + FetchReadTokenResponse, + FetchReadWriteTokenRequest, + FetchReadWriteTokenResponse, + GetConnectionRequest, + GetRepositoryRequest, + GitHubConfig, + GitHubEnterpriseConfig, + GitLabConfig, + InstallationState, + ListConnectionsRequest, + ListConnectionsResponse, + ListRepositoriesRequest, + ListRepositoriesResponse, + OAuthCredential, + ProcessWebhookRequest, + Repository, + ServiceDirectoryConfig, + UpdateConnectionRequest, + UserCredential, +) + +__all__ = ( + 'OperationMetadata', + 'RunWorkflowCustomOperationMetadata', + 'BatchCreateRepositoriesRequest', + 'BatchCreateRepositoriesResponse', + 'Connection', + 'CreateConnectionRequest', + 'CreateRepositoryRequest', + 'DeleteConnectionRequest', + 'DeleteRepositoryRequest', + 'FetchGitRefsRequest', + 'FetchGitRefsResponse', + 'FetchLinkableRepositoriesRequest', + 'FetchLinkableRepositoriesResponse', + 'FetchReadTokenRequest', + 'FetchReadTokenResponse', + 'FetchReadWriteTokenRequest', + 'FetchReadWriteTokenResponse', + 'GetConnectionRequest', + 'GetRepositoryRequest', + 'GitHubConfig', + 'GitHubEnterpriseConfig', + 'GitLabConfig', + 'InstallationState', + 'ListConnectionsRequest', + 'ListConnectionsResponse', + 'ListRepositoriesRequest', + 'ListRepositoriesResponse', + 'OAuthCredential', + 'ProcessWebhookRequest', + 'Repository', + 'ServiceDirectoryConfig', + 'UpdateConnectionRequest', + 'UserCredential', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py 
b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py new file mode 100644 index 00000000..a016f0af --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.devtools.cloudbuild.v2', + manifest={ + 'OperationMetadata', + 'RunWorkflowCustomOperationMetadata', + }, +) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. 
Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class RunWorkflowCustomOperationMetadata(proto.Message): + r"""Represents the custom metadata of the RunWorkflow + long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + verb (str): + Output only. Name of the verb executed by the + operation. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + target (str): + Output only. Server-defined resource path for + the target of the operation. + pipeline_run_id (str): + Output only. ID of the pipeline run created + by RunWorkflow. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + verb: str = proto.Field( + proto.STRING, + number=3, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=4, + ) + api_version: str = proto.Field( + proto.STRING, + number=5, + ) + target: str = proto.Field( + proto.STRING, + number=6, + ) + pipeline_run_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py new file mode 100644 index 00000000..6d5e147a --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py @@ -0,0 +1,1104 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.devtools.cloudbuild.v2', + manifest={ + 'Connection', + 'InstallationState', + 'FetchLinkableRepositoriesRequest', + 'FetchLinkableRepositoriesResponse', + 'GitHubConfig', + 'GitHubEnterpriseConfig', + 'GitLabConfig', + 'ServiceDirectoryConfig', + 'Repository', + 'OAuthCredential', + 'UserCredential', + 'CreateConnectionRequest', + 'GetConnectionRequest', + 'ListConnectionsRequest', + 'ListConnectionsResponse', + 'UpdateConnectionRequest', + 'DeleteConnectionRequest', + 'CreateRepositoryRequest', + 'BatchCreateRepositoriesRequest', + 'BatchCreateRepositoriesResponse', + 'GetRepositoryRequest', + 'ListRepositoriesRequest', + 'ListRepositoriesResponse', + 'DeleteRepositoryRequest', + 'FetchReadWriteTokenRequest', + 'FetchReadTokenRequest', + 'FetchReadTokenResponse', + 'FetchReadWriteTokenResponse', + 'ProcessWebhookRequest', + 'FetchGitRefsRequest', + 'FetchGitRefsResponse', + }, +) + + +class Connection(proto.Message): + r"""A connection to a SCM like GitHub, GitHub Enterprise, + Bitbucket Server or GitLab. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Immutable. The resource name of the connection, in the + format + ``projects/{project}/locations/{location}/connections/{connection_id}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
Server assigned timestamp for + when the connection was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Server assigned timestamp for + when the connection was updated. + github_config (google.cloud.devtools.cloudbuild_v2.types.GitHubConfig): + Configuration for connections to github.com. + + This field is a member of `oneof`_ ``connection_config``. + github_enterprise_config (google.cloud.devtools.cloudbuild_v2.types.GitHubEnterpriseConfig): + Configuration for connections to an instance + of GitHub Enterprise. + + This field is a member of `oneof`_ ``connection_config``. + gitlab_config (google.cloud.devtools.cloudbuild_v2.types.GitLabConfig): + Configuration for connections to gitlab.com + or an instance of GitLab Enterprise. + + This field is a member of `oneof`_ ``connection_config``. + installation_state (google.cloud.devtools.cloudbuild_v2.types.InstallationState): + Output only. Installation state of the + Connection. + disabled (bool): + If disabled is set to true, functionality is + disabled for this connection. Repository based + API methods and webhooks processing for + repositories in this connection will be + disabled. + reconciling (bool): + Output only. Set to true when the connection + is being set up or updated in the background. + annotations (MutableMapping[str, str]): + Allows clients to store small amounts of + arbitrary data. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + github_config: 'GitHubConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='connection_config', + message='GitHubConfig', + ) + github_enterprise_config: 'GitHubEnterpriseConfig' = proto.Field( + proto.MESSAGE, + number=6, + oneof='connection_config', + message='GitHubEnterpriseConfig', + ) + gitlab_config: 'GitLabConfig' = proto.Field( + proto.MESSAGE, + number=7, + oneof='connection_config', + message='GitLabConfig', + ) + installation_state: 'InstallationState' = proto.Field( + proto.MESSAGE, + number=12, + message='InstallationState', + ) + disabled: bool = proto.Field( + proto.BOOL, + number=13, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=14, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=15, + ) + etag: str = proto.Field( + proto.STRING, + number=16, + ) + + +class InstallationState(proto.Message): + r"""Describes stage and necessary actions to be taken by the + user to complete the installation. Used for GitHub and GitHub + Enterprise based connections. + + Attributes: + stage (google.cloud.devtools.cloudbuild_v2.types.InstallationState.Stage): + Output only. Current step of the installation + process. + message (str): + Output only. Message of what the user should + do next to continue the installation. Empty + string if the installation is already complete. + action_uri (str): + Output only. Link to follow for next action. + Empty string if the installation is already + complete. + """ + class Stage(proto.Enum): + r"""Stage of the installation process. + + Values: + STAGE_UNSPECIFIED (0): + No stage specified. + PENDING_CREATE_APP (1): + Only for GitHub Enterprise. 
An App creation + has been requested. The user needs to confirm + the creation in their GitHub enterprise host. + PENDING_USER_OAUTH (2): + User needs to authorize the GitHub (or + Enterprise) App via OAuth. + PENDING_INSTALL_APP (3): + User needs to follow the link to install the + GitHub (or Enterprise) App. + COMPLETE (10): + Installation process has been completed. + """ + STAGE_UNSPECIFIED = 0 + PENDING_CREATE_APP = 1 + PENDING_USER_OAUTH = 2 + PENDING_INSTALL_APP = 3 + COMPLETE = 10 + + stage: Stage = proto.Field( + proto.ENUM, + number=1, + enum=Stage, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + action_uri: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchLinkableRepositoriesRequest(proto.Message): + r"""Request message for FetchLinkableRepositories. + + Attributes: + connection (str): + Required. The name of the Connection. Format: + ``projects/*/locations/*/connections/*``. + page_size (int): + Number of results to return in the list. + Default to 20. + page_token (str): + Page start. + """ + + connection: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchLinkableRepositoriesResponse(proto.Message): + r"""Response message for FetchLinkableRepositories. + + Attributes: + repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): + repositories ready to be created. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + repositories: MutableSequence['Repository'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Repository', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GitHubConfig(proto.Message): + r"""Configuration for connections to github.com. 
+ + Attributes: + authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.OAuthCredential): + OAuth credential of the account that + authorized the Cloud Build GitHub App. It is + recommended to use a robot account instead of a + human user account. The OAuth token must be tied + to the Cloud Build GitHub App. + app_installation_id (int): + GitHub App installation id. + """ + + authorizer_credential: 'OAuthCredential' = proto.Field( + proto.MESSAGE, + number=1, + message='OAuthCredential', + ) + app_installation_id: int = proto.Field( + proto.INT64, + number=2, + ) + + +class GitHubEnterpriseConfig(proto.Message): + r"""Configuration for connections to an instance of GitHub + Enterprise. + + Attributes: + host_uri (str): + Required. The URI of the GitHub Enterprise + host this connection is for. + api_key (str): + Required. API Key used for authentication of + webhook events. + app_id (int): + Id of the GitHub App created from the + manifest. + app_slug (str): + The URL-friendly name of the GitHub App. + private_key_secret_version (str): + SecretManager resource containing the private key of the + GitHub App, formatted as + ``projects/*/secrets/*/versions/*``. + webhook_secret_secret_version (str): + SecretManager resource containing the webhook secret of the + GitHub App, formatted as + ``projects/*/secrets/*/versions/*``. + app_installation_id (int): + ID of the installation of the GitHub App. + service_directory_config (google.cloud.devtools.cloudbuild_v2.types.ServiceDirectoryConfig): + Configuration for using Service Directory to + privately connect to a GitHub Enterprise server. + This should only be set if the GitHub Enterprise + server is hosted on-premises and not reachable + by public internet. If this field is left empty, + calls to the GitHub Enterprise server will be + made over the public internet. + ssl_ca (str): + SSL certificate to use for requests to GitHub + Enterprise. + server_version (str): + Output only. 
GitHub Enterprise version installed at the + host_uri. + """ + + host_uri: str = proto.Field( + proto.STRING, + number=1, + ) + api_key: str = proto.Field( + proto.STRING, + number=12, + ) + app_id: int = proto.Field( + proto.INT64, + number=2, + ) + app_slug: str = proto.Field( + proto.STRING, + number=13, + ) + private_key_secret_version: str = proto.Field( + proto.STRING, + number=4, + ) + webhook_secret_secret_version: str = proto.Field( + proto.STRING, + number=5, + ) + app_installation_id: int = proto.Field( + proto.INT64, + number=9, + ) + service_directory_config: 'ServiceDirectoryConfig' = proto.Field( + proto.MESSAGE, + number=10, + message='ServiceDirectoryConfig', + ) + ssl_ca: str = proto.Field( + proto.STRING, + number=11, + ) + server_version: str = proto.Field( + proto.STRING, + number=14, + ) + + +class GitLabConfig(proto.Message): + r"""Configuration for connections to gitlab.com or an instance of + GitLab Enterprise. + + Attributes: + host_uri (str): + The URI of the GitLab Enterprise host this + connection is for. If not specified, the default + value is https://gitlab.com. + webhook_secret_secret_version (str): + Required. Immutable. SecretManager resource containing the + webhook secret of a GitLab Enterprise project, formatted as + ``projects/*/secrets/*/versions/*``. + read_authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.UserCredential): + Required. A GitLab personal access token with the minimum + ``read_api`` scope access. + authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.UserCredential): + Required. A GitLab personal access token with the ``api`` + scope access. + service_directory_config (google.cloud.devtools.cloudbuild_v2.types.ServiceDirectoryConfig): + Configuration for using Service Directory to + privately connect to a GitLab Enterprise server. + This should only be set if the GitLab Enterprise + server is hosted on-premises and not reachable + by public internet. 
If this field is left empty, + calls to the GitLab Enterprise server will be + made over the public internet. + ssl_ca (str): + SSL certificate to use for requests to GitLab + Enterprise. + server_version (str): + Output only. Version of the GitLab Enterprise server running + on the ``host_uri``. + """ + + host_uri: str = proto.Field( + proto.STRING, + number=1, + ) + webhook_secret_secret_version: str = proto.Field( + proto.STRING, + number=2, + ) + read_authorizer_credential: 'UserCredential' = proto.Field( + proto.MESSAGE, + number=3, + message='UserCredential', + ) + authorizer_credential: 'UserCredential' = proto.Field( + proto.MESSAGE, + number=4, + message='UserCredential', + ) + service_directory_config: 'ServiceDirectoryConfig' = proto.Field( + proto.MESSAGE, + number=5, + message='ServiceDirectoryConfig', + ) + ssl_ca: str = proto.Field( + proto.STRING, + number=6, + ) + server_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ServiceDirectoryConfig(proto.Message): + r"""ServiceDirectoryConfig represents Service Directory + configuration for a connection. + + Attributes: + service (str): + Required. The Service Directory service name. + Format: + projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}. + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Repository(proto.Message): + r"""A repository associated to a parent connection. + + Attributes: + name (str): + Immutable. Resource name of the repository, in the format + ``projects/*/locations/*/connections/*/repositories/*``. + remote_uri (str): + Required. Git Clone HTTPS URI. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Server assigned timestamp for + when the connection was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Server assigned timestamp for + when the connection was updated. 
+ annotations (MutableMapping[str, str]): + Allows clients to store small amounts of + arbitrary data. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. + webhook_id (str): + Output only. External ID of the webhook + created for the repository. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + remote_uri: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + etag: str = proto.Field( + proto.STRING, + number=7, + ) + webhook_id: str = proto.Field( + proto.STRING, + number=8, + ) + + +class OAuthCredential(proto.Message): + r"""Represents an OAuth token of the account that authorized the + Connection, and associated metadata. + + Attributes: + oauth_token_secret_version (str): + A SecretManager resource containing the OAuth token that + authorizes the Cloud Build connection. Format: + ``projects/*/secrets/*/versions/*``. + username (str): + Output only. The username associated to this + token. + """ + + oauth_token_secret_version: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UserCredential(proto.Message): + r"""Represents a personal access token that authorized the + Connection, and associated metadata. + + Attributes: + user_token_secret_version (str): + Required. A SecretManager resource containing the user token + that authorizes the Cloud Build connection. Format: + ``projects/*/secrets/*/versions/*``. + username (str): + Output only. 
The username associated to this + token. + """ + + user_token_secret_version: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateConnectionRequest(proto.Message): + r"""Message for creating a Connection + + Attributes: + parent (str): + Required. Project and location where the connection will be + created. Format: ``projects/*/locations/*``. + connection (google.cloud.devtools.cloudbuild_v2.types.Connection): + Required. The Connection to create. + connection_id (str): + Required. The ID to use for the Connection, which will + become the final component of the Connection's resource + name. Names must be unique per-project per-location. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + connection: 'Connection' = proto.Field( + proto.MESSAGE, + number=2, + message='Connection', + ) + connection_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetConnectionRequest(proto.Message): + r"""Message for getting the details of a Connection. + + Attributes: + name (str): + Required. The name of the Connection to retrieve. Format: + ``projects/*/locations/*/connections/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListConnectionsRequest(proto.Message): + r"""Message for requesting list of Connections. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + Connections. Format: ``projects/*/locations/*``. + page_size (int): + Number of results to return in the list. + page_token (str): + Page start. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListConnectionsResponse(proto.Message): + r"""Message for response to listing Connections. 
+ + Attributes: + connections (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Connection]): + The list of Connections. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + connections: MutableSequence['Connection'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Connection', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateConnectionRequest(proto.Message): + r"""Message for updating a Connection. + + Attributes: + connection (google.cloud.devtools.cloudbuild_v2.types.Connection): + Required. The Connection to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. + allow_missing (bool): + If set to true, and the connection is not found a new + connection will be created. In this situation + ``update_mask`` is ignored. The creation will succeed only + if the input connection has all the necessary information + (e.g a github_config with both user_oauth_token and + installation_id properties). + etag (str): + The current etag of the connection. + If an etag is provided and does not match the + current etag of the connection, update will be + blocked and an ABORTED error will be returned. + """ + + connection: 'Connection' = proto.Field( + proto.MESSAGE, + number=1, + message='Connection', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteConnectionRequest(proto.Message): + r"""Message for deleting a Connection. + + Attributes: + name (str): + Required. The name of the Connection to delete. Format: + ``projects/*/locations/*/connections/*``. + etag (str): + The current etag of the connection. 
+ If an etag is provided and does not match the + current etag of the connection, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + If set, validate the request, but do not + actually post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class CreateRepositoryRequest(proto.Message): + r"""Message for creating a Repository. + + Attributes: + parent (str): + Required. The connection to contain the + repository. If the request is part of a + BatchCreateRepositoriesRequest, this field + should be empty or match the parent specified + there. + repository (google.cloud.devtools.cloudbuild_v2.types.Repository): + Required. The repository to create. + repository_id (str): + Required. The ID to use for the repository, which will + become the final component of the repository's resource + name. This ID should be unique in the connection. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + repository: 'Repository' = proto.Field( + proto.MESSAGE, + number=2, + message='Repository', + ) + repository_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BatchCreateRepositoriesRequest(proto.Message): + r"""Message for creating repositoritories in batch. + + Attributes: + parent (str): + Required. The connection to contain all the repositories + being created. Format: + projects/\ */locations/*/connections/\* The parent field in + the CreateRepositoryRequest messages must either be empty or + match this field. + requests (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]): + Required. The request messages specifying the + repositories to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence['CreateRepositoryRequest'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='CreateRepositoryRequest', + ) + + +class BatchCreateRepositoriesResponse(proto.Message): + r"""Message for response of creating repositories in batch. + + Attributes: + repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): + Repository resources created. + """ + + repositories: MutableSequence['Repository'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Repository', + ) + + +class GetRepositoryRequest(proto.Message): + r"""Message for getting the details of a Repository. + + Attributes: + name (str): + Required. The name of the Repository to retrieve. Format: + ``projects/*/locations/*/connections/*/repositories/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListRepositoriesRequest(proto.Message): + r"""Message for requesting list of Repositories. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + Repositories. Format: + ``projects/*/locations/*/connections/*``. + page_size (int): + Number of results to return in the list. + page_token (str): + Page start. + filter (str): + A filter expression that filters resources listed in the + response. Expressions must follow API improvement proposal + `AIP-160 `__. e.g. + ``remote_uri:"https://github.com*"``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListRepositoriesResponse(proto.Message): + r"""Message for response to listing Repositories. + + Attributes: + repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): + The list of Repositories. 
+ next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + repositories: MutableSequence['Repository'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Repository', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteRepositoryRequest(proto.Message): + r"""Message for deleting a Repository. + + Attributes: + name (str): + Required. The name of the Repository to delete. Format: + ``projects/*/locations/*/connections/*/repositories/*``. + etag (str): + The current etag of the repository. + If an etag is provided and does not match the + current etag of the repository, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + If set, validate the request, but do not + actually post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class FetchReadWriteTokenRequest(proto.Message): + r"""Message for fetching SCM read/write token. + + Attributes: + repository (str): + Required. The resource name of the repository in the format + ``projects/*/locations/*/connections/*/repositories/*``. + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchReadTokenRequest(proto.Message): + r"""Message for fetching SCM read token. + + Attributes: + repository (str): + Required. The resource name of the repository in the format + ``projects/*/locations/*/connections/*/repositories/*``. + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchReadTokenResponse(proto.Message): + r"""Message for responding to get read token. + + Attributes: + token (str): + The token content. + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Expiration timestamp. Can be empty if unknown + or non-expiring. 
+ """ + + token: str = proto.Field( + proto.STRING, + number=1, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class FetchReadWriteTokenResponse(proto.Message): + r"""Message for responding to get read/write token. + + Attributes: + token (str): + The token content. + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Expiration timestamp. Can be empty if unknown + or non-expiring. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class ProcessWebhookRequest(proto.Message): + r"""RPC request object accepted by the ProcessWebhook RPC method. + + Attributes: + parent (str): + Required. Project and location where the webhook will be + received. Format: ``projects/*/locations/*``. + body (google.api.httpbody_pb2.HttpBody): + HTTP request body. + webhook_key (str): + Arbitrary additional key to find the maching + repository for a webhook event if needed. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + body: httpbody_pb2.HttpBody = proto.Field( + proto.MESSAGE, + number=2, + message=httpbody_pb2.HttpBody, + ) + webhook_key: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchGitRefsRequest(proto.Message): + r"""Request for fetching git refs + + Attributes: + repository (str): + Required. The resource name of the repository in the format + ``projects/*/locations/*/connections/*/repositories/*``. + ref_type (google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest.RefType): + Type of refs to fetch + """ + class RefType(proto.Enum): + r"""Type of refs + + Values: + REF_TYPE_UNSPECIFIED (0): + No type specified. + TAG (1): + To fetch tags. + BRANCH (2): + To fetch branches. 
+ """ + REF_TYPE_UNSPECIFIED = 0 + TAG = 1 + BRANCH = 2 + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + ref_type: RefType = proto.Field( + proto.ENUM, + number=2, + enum=RefType, + ) + + +class FetchGitRefsResponse(proto.Message): + r"""Response for fetching git refs + + Attributes: + ref_names (MutableSequence[str]): + Name of the refs fetched. + """ + + ref_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini new file mode 100644 index 00000000..574c5aed --- /dev/null +++ b/owl-bot-staging/v2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py new file mode 100644 index 00000000..89095013 --- /dev/null +++ b/owl-bot-staging/v2/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/devtools/cloudbuild_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py new file mode 100644 index 00000000..fa9a6929 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_batch_create_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + requests = cloudbuild_v2.CreateRepositoryRequest() + requests.parent = "parent_value" + requests.repository.remote_uri = "remote_uri_value" + requests.repository_id = "repository_id_value" + + request = cloudbuild_v2.BatchCreateRepositoriesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_repositories(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py new file mode 100644 index 00000000..cad8baae --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_batch_create_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + requests = cloudbuild_v2.CreateRepositoryRequest() + requests.parent = "parent_value" + requests.repository.remote_uri = "remote_uri_value" + requests.repository_id = "repository_id_value" + + request = cloudbuild_v2.BatchCreateRepositoriesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_repositories(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py new file mode 100644 index 00000000..066f3245 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_CreateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_create_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_CreateConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py new file mode 100644 index 00000000..d393a554 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_create_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py new file mode 100644 index 00000000..52aaa857 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_CreateRepository_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_create_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + repository = cloudbuild_v2.Repository() + repository.remote_uri = "remote_uri_value" + + request = cloudbuild_v2.CreateRepositoryRequest( + parent="parent_value", + repository=repository, + repository_id="repository_id_value", + ) + + # Make the request + operation = client.create_repository(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_CreateRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py new file mode 100644 index 00000000..eb9a5e29 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_create_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + repository = cloudbuild_v2.Repository() + repository.remote_uri = "remote_uri_value" + + request = cloudbuild_v2.CreateRepositoryRequest( + parent="parent_value", + repository=repository, + repository_id="repository_id_value", + ) + + # Make the request + operation = client.create_repository(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py new file mode 100644 index 00000000..ef37e513 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_delete_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py new file mode 100644 index 00000000..a18ff650 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_delete_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py new file mode 100644 index 00000000..58a5dac2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_delete_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteRepositoryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_repository(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py new file mode 100644 index 00000000..f141cb54 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_delete_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteRepositoryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_repository(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py new file mode 100644 index 00000000..2c639684 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchGitRefs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_fetch_git_refs(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchGitRefsRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_git_refs(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py new file mode 100644 index 00000000..fde064f0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchGitRefs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_fetch_git_refs(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchGitRefsRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_git_refs(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py new file mode 100644 index 00000000..c6c744fc --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchLinkableRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_fetch_linkable_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchLinkableRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py new file mode 100644 index 00000000..9d422598 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchLinkableRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_fetch_linkable_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchLinkableRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py new file mode 100644 index 00000000..b110edb9 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_fetch_read_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadTokenRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_read_token(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py new file mode 100644 index 00000000..08680d32 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_fetch_read_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadTokenRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_read_token(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py new file mode 100644 index 00000000..f2fab11e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadWriteToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_fetch_read_write_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadWriteTokenRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py new file mode 100644 index 00000000..64062425 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadWriteToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_fetch_read_write_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadWriteTokenRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py new file mode 100644 index 00000000..cbce2c58 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_GetConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_get_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_GetConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py new file mode 100644 index 00000000..7da0f760 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_GetConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_get_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_GetConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py new file mode 100644 index 00000000..077cd120 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_GetRepository_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_get_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetRepositoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_repository(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_GetRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py new file mode 100644 index 00000000..4f0bbd72 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_GetRepository_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_get_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetRepositoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_repository(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_GetRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py new file mode 100644 index 00000000..78e39000 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_ListConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_list_connections(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_ListConnections_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py new file mode 100644 index 00000000..b0b6783d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_ListConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_list_connections(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_ListConnections_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py new file mode 100644 index 00000000..6140bd1a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_ListRepositories_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_list_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListRepositoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_ListRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py new file mode 100644 index 00000000..b133c8eb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_list_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListRepositoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py new file mode 100644 index 00000000..792d9cd7 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_update_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.UpdateConnectionRequest( + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py new file mode 100644 index 00000000..f1583940 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_update_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.UpdateConnectionRequest( + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json new file mode 100644 index 00000000..818d3fc2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -0,0 +1,2309 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.devtools.cloudbuild.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-build", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.batch_create_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.BatchCreateRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "BatchCreateRepositories" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_repositories" + }, + "description": "Sample for BatchCreateRepositories", + "file": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.batch_create_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.BatchCreateRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "BatchCreateRepositories" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_repositories" + }, + "description": "Sample for BatchCreateRepositories", + "file": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.create_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "CreateConnection" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection", + "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" + }, + { + "name": "connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_connection" + }, + "description": "Sample for CreateConnection", + "file": "cloudbuild_v2_generated_repository_manager_create_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateConnection_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_create_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.create_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "CreateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest" + }, + { + "name": 
"parent", + "type": "str" + }, + { + "name": "connection", + "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" + }, + { + "name": "connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_connection" + }, + "description": "Sample for CreateConnection", + "file": "cloudbuild_v2_generated_repository_manager_create_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_create_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.create_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "CreateRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "repository", + "type": 
"google.cloud.devtools.cloudbuild_v2.types.Repository" + }, + { + "name": "repository_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_repository" + }, + "description": "Sample for CreateRepository", + "file": "cloudbuild_v2_generated_repository_manager_create_repository_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateRepository_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_create_repository_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.create_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "CreateRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "repository", + "type": "google.cloud.devtools.cloudbuild_v2.types.Repository" + }, + { + "name": "repository_id", 
+ "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_repository" + }, + "description": "Sample for CreateRepository", + "file": "cloudbuild_v2_generated_repository_manager_create_repository_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_create_repository_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.delete_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "DeleteConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "delete_connection" + }, + "description": "Sample for DeleteConnection", + "file": "cloudbuild_v2_generated_repository_manager_delete_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_delete_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.delete_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "DeleteConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_connection" + }, + "description": "Sample for DeleteConnection", + "file": "cloudbuild_v2_generated_repository_manager_delete_connection_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_delete_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.delete_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "DeleteRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_repository" + }, + "description": "Sample for DeleteRepository", + "file": "cloudbuild_v2_generated_repository_manager_delete_repository_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, 
+ "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_delete_repository_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.delete_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "DeleteRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_repository" + }, + "description": "Sample for DeleteRepository", + "file": "cloudbuild_v2_generated_repository_manager_delete_repository_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + 
"start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_delete_repository_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_git_refs", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchGitRefs", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchGitRefs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse", + "shortName": "fetch_git_refs" + }, + "description": "Sample for FetchGitRefs", + "file": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_git_refs", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchGitRefs", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchGitRefs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse", + "shortName": "fetch_git_refs" + }, + "description": "Sample for FetchGitRefs", + "file": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_linkable_repositories", + 
"method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchLinkableRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchLinkableRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesAsyncPager", + "shortName": "fetch_linkable_repositories" + }, + "description": "Sample for FetchLinkableRepositories", + "file": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_linkable_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchLinkableRepositories", + "service": { + "fullName": 
"google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchLinkableRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesPager", + "shortName": "fetch_linkable_repositories" + }, + "description": "Sample for FetchLinkableRepositories", + "file": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_read_token", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadToken", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchReadToken" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse", + "shortName": "fetch_read_token" + }, + "description": "Sample for FetchReadToken", + "file": "cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_read_token", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadToken", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchReadToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse", + "shortName": "fetch_read_token" + }, + "description": "Sample for FetchReadToken", + "file": "cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_read_write_token", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadWriteToken", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchReadWriteToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse", + "shortName": "fetch_read_write_token" + }, + "description": "Sample for FetchReadWriteToken", + "file": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_read_write_token", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadWriteToken", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchReadWriteToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse", + "shortName": "fetch_read_write_token" + }, + "description": "Sample for FetchReadWriteToken", + "file": 
"cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.get_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "GetConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.Connection", + "shortName": "get_connection" + }, + "description": "Sample for GetConnection", + "file": "cloudbuild_v2_generated_repository_manager_get_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetConnection_async", + 
"segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_get_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.get_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "GetConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.Connection", + "shortName": "get_connection" + }, + "description": "Sample for GetConnection", + "file": "cloudbuild_v2_generated_repository_manager_get_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_get_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.get_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "GetRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.Repository", + "shortName": "get_repository" + }, + "description": "Sample for GetRepository", + "file": "cloudbuild_v2_generated_repository_manager_get_repository_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetRepository_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_get_repository_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.get_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "GetRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.Repository", + "shortName": "get_repository" + }, + "description": "Sample for GetRepository", + "file": "cloudbuild_v2_generated_repository_manager_get_repository_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetRepository_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_get_repository_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": 
"google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.list_connections", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListConnections", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "ListConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsAsyncPager", + "shortName": "list_connections" + }, + "description": "Sample for ListConnections", + "file": "cloudbuild_v2_generated_repository_manager_list_connections_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_list_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.list_connections", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListConnections", + "service": { + 
"fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "ListConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsPager", + "shortName": "list_connections" + }, + "description": "Sample for ListConnections", + "file": "cloudbuild_v2_generated_repository_manager_list_connections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_list_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.list_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "ListRepositories" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesAsyncPager", + "shortName": "list_repositories" + }, + "description": "Sample for ListRepositories", + "file": "cloudbuild_v2_generated_repository_manager_list_repositories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListRepositories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_list_repositories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.list_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "ListRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesPager", + "shortName": "list_repositories" + }, + "description": "Sample for ListRepositories", + "file": "cloudbuild_v2_generated_repository_manager_list_repositories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_list_repositories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.update_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.UpdateConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "UpdateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest" + }, + { + "name": "connection", + "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_connection" + }, + "description": "Sample for UpdateConnection", + "file": "cloudbuild_v2_generated_repository_manager_update_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_update_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.update_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.UpdateConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "UpdateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest" + }, + { + "name": "connection", + "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_connection" + }, + "description": "Sample for UpdateConnection", + "file": "cloudbuild_v2_generated_repository_manager_update_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_update_connection_sync.py" + } + ] +} diff --git a/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py new file mode 100644 index 00000000..6df46861 --- /dev/null +++ b/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py @@ -0,0 +1,189 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class cloudbuildCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_repositories': ('parent', 'requests', ), + 'create_connection': ('parent', 'connection', 'connection_id', ), + 'create_repository': ('parent', 'repository', 'repository_id', ), + 'delete_connection': ('name', 'etag', 'validate_only', ), + 'delete_repository': ('name', 'etag', 'validate_only', ), + 'fetch_git_refs': ('repository', 'ref_type', ), + 'fetch_linkable_repositories': ('connection', 'page_size', 'page_token', ), + 'fetch_read_token': ('repository', ), + 'fetch_read_write_token': ('repository', ), + 'get_connection': ('name', ), + 'get_repository': ('name', ), + 'list_connections': ('parent', 'page_size', 'page_token', ), + 'list_repositories': ('parent', 'page_size', 'page_token', 'filter', ), + 'update_connection': ('connection', 'update_mask', 'allow_missing', 'etag', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=cloudbuildCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the cloudbuild client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py new file mode 100644 index 00000000..525db897 --- /dev/null +++ b/owl-bot-staging/v2/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-build' + + +description = "Google Cloud Build API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/devtools/cloudbuild/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/python-build" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud", "google.cloud.devtools"] + +setuptools.setup( + name=name, + version=version, + description=description, + 
long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt new file mode 100644 index 00000000..2beecf99 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py new file mode 100644 index 00000000..786103fe --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py @@ -0,0 +1,9596 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import 
operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import RepositoryManagerAsyncClient +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import RepositoryManagerClient +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import transports +from google.cloud.devtools.cloudbuild_v2.types import cloudbuild +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RepositoryManagerClient._get_default_mtls_endpoint(None) is None + assert RepositoryManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RepositoryManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RepositoryManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RepositoryManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RepositoryManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RepositoryManagerClient, "grpc"), + (RepositoryManagerAsyncClient, "grpc_asyncio"), + (RepositoryManagerClient, "rest"), +]) +def test_repository_manager_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudbuild.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RepositoryManagerGrpcTransport, "grpc"), + 
(transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RepositoryManagerRestTransport, "rest"), +]) +def test_repository_manager_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RepositoryManagerClient, "grpc"), + (RepositoryManagerAsyncClient, "grpc_asyncio"), + (RepositoryManagerClient, "rest"), +]) +def test_repository_manager_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudbuild.googleapis.com' + ) + + +def test_repository_manager_client_get_transport_class(): + transport = RepositoryManagerClient.get_transport_class() + available_transports = [ + transports.RepositoryManagerGrpcTransport, + 
transports.RepositoryManagerRestTransport, + ] + assert transport in available_transports + + transport = RepositoryManagerClient.get_transport_class("grpc") + assert transport == transports.RepositoryManagerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), +]) +@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) +@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) +def test_repository_manager_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RepositoryManagerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RepositoryManagerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", "true"), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (RepositoryManagerClient, 
transports.RepositoryManagerGrpcTransport, "grpc", "false"), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", "true"), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", "false"), +]) +@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) +@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_repository_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RepositoryManagerClient, RepositoryManagerAsyncClient +]) +@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) +@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) +def test_repository_manager_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), +]) +def test_repository_manager_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", grpc_helpers), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", None), +]) +def test_repository_manager_client_client_options_credentials_file(client_class, 
transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_repository_manager_client_client_options_from_dict(): + with mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = RepositoryManagerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", grpc_helpers), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_repository_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudbuild.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="cloudbuild.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.CreateConnectionRequest, + dict, +]) +def test_create_connection(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + client.create_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateConnectionRequest() + +@pytest.mark.asyncio +async def test_create_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.CreateConnectionRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_connection_async_from_dict(): + await test_create_connection_async(request_type=dict) + + +def test_create_connection_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.CreateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_connection_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = repositories.CreateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_connection_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_connection( + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].parent
+    mock_val = 'parent_value'
+    assert arg == mock_val
+    arg = args[0].connection
+    mock_val = repositories.Connection(name='name_value')
+    assert arg == mock_val
+    arg = args[0].connection_id
+    mock_val = 'connection_id_value'
+    assert arg == mock_val
+
+
+def test_create_connection_flattened_error():
+    client = RepositoryManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_connection(
+            repositories.CreateConnectionRequest(),
+            parent='parent_value',
+            connection=repositories.Connection(name='name_value'),
+            connection_id='connection_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_connection_flattened_async():
+    client = RepositoryManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (The async stub must return an awaitable, hence FakeUnaryUnaryCall.)
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_connection(
+            parent='parent_value',
+            connection=repositories.Connection(name='name_value'),
+            connection_id='connection_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].connection + mock_val = repositories.Connection(name='name_value') + assert arg == mock_val + arg = args[0].connection_id + mock_val = 'connection_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_connection_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_connection( + repositories.CreateConnectionRequest(), + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.GetConnectionRequest, + dict, +]) +def test_get_connection(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.Connection( + name='name_value', + disabled=True, + reconciling=True, + etag='etag_value', + ) + response = client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetConnectionRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, repositories.Connection)
+    assert response.name == 'name_value'
+    assert response.disabled is True
+    assert response.reconciling is True
+    assert response.etag == 'etag_value'
+
+
+def test_get_connection_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = RepositoryManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection),
+            '__call__') as call:
+        client.get_connection()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == repositories.GetConnectionRequest()
+
+@pytest.mark.asyncio
+async def test_get_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.GetConnectionRequest):
+    client = RepositoryManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_connection),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection(
+            name='name_value',
+            disabled=True,
+            reconciling=True,
+            etag='etag_value',
+        ))
+        response = await client.get_connection(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == repositories.GetConnectionRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, repositories.Connection) + assert response.name == 'name_value' + assert response.disabled is True + assert response.reconciling is True + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_get_connection_async_from_dict(): + await test_get_connection_async(request_type=dict) + + +def test_get_connection_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.GetConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + call.return_value = repositories.Connection() + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_connection_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.GetConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection()) + await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_connection_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_connection_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection( + repositories.GetConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_connection_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        # (The async stub must return an awaitable, hence FakeUnaryUnaryCall.)
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_connection(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_connection_flattened_error_async():
+    client = RepositoryManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_connection(
+            repositories.GetConnectionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    repositories.ListConnectionsRequest,
+    dict,
+])
+def test_list_connections(request_type, transport: str = 'grpc'):
+    client = RepositoryManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_connections),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = repositories.ListConnectionsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_connections(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == repositories.ListConnectionsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListConnectionsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_connections_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = RepositoryManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_connections),
+            '__call__') as call:
+        client.list_connections()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == repositories.ListConnectionsRequest()
+
+@pytest.mark.asyncio
+async def test_list_connections_async(transport: str = 'grpc_asyncio', request_type=repositories.ListConnectionsRequest):
+    client = RepositoryManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_connections),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_connections(request)
+
+    # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_connections_async_from_dict(): + await test_list_connections_async(request_type=dict) + + +def test_list_connections_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.ListConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + call.return_value = repositories.ListConnectionsResponse() + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_connections_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.ListConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse()) + await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_connections_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_connections_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + repositories.ListConnectionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_connections_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_connections_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_connections( + repositories.ListConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_connections_pager(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_connections(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Connection) + for i in results) +def test_list_connections_pages(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_connections(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_connections_async_pager(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_connections(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, repositories.Connection) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_connections_async_pages(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + repositories.UpdateConnectionRequest, + dict, +]) +def test_update_connection(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.UpdateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + client.update_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.UpdateConnectionRequest() + +@pytest.mark.asyncio +async def test_update_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.UpdateConnectionRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.UpdateConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_connection_async_from_dict(): + await test_update_connection_async(request_type=dict) + + +def test_update_connection_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.UpdateConnectionRequest() + + request.connection.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_connection_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.UpdateConnectionRequest() + + request.connection.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection.name=name_value', + ) in kw['metadata'] + + +def test_update_connection_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_connection( + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = repositories.Connection(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_connection_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_connection( + repositories.UpdateConnectionRequest(), + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_connection_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_connection( + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = repositories.Connection(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_connection_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_connection( + repositories.UpdateConnectionRequest(), + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.DeleteConnectionRequest, + dict, +]) +def test_delete_connection(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + client.delete_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteConnectionRequest() + +@pytest.mark.asyncio +async def test_delete_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.DeleteConnectionRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_connection_async_from_dict(): + await test_delete_connection_async(request_type=dict) + + +def test_delete_connection_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.DeleteConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_connection_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.DeleteConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_connection_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_connection_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_connection( + repositories.DeleteConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_connection_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_connection_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_connection( + repositories.DeleteConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.CreateRepositoryRequest, + dict, +]) +def test_create_repository(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateRepositoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_repository_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + client.create_repository() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateRepositoryRequest() + +@pytest.mark.asyncio +async def test_create_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.CreateRepositoryRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateRepositoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_repository_async_from_dict(): + await test_create_repository_async(request_type=dict) + + +def test_create_repository_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = repositories.CreateRepositoryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_repository_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.CreateRepositoryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_repository_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_repository( + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].repository + mock_val = repositories.Repository(name='name_value') + assert arg == mock_val + arg = args[0].repository_id + mock_val = 'repository_id_value' + assert arg == mock_val + + +def test_create_repository_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_repository( + repositories.CreateRepositoryRequest(), + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + +@pytest.mark.asyncio +async def test_create_repository_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_repository( + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].repository + mock_val = repositories.Repository(name='name_value') + assert arg == mock_val + arg = args[0].repository_id + mock_val = 'repository_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_repository_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_repository( + repositories.CreateRepositoryRequest(), + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.BatchCreateRepositoriesRequest, + dict, +]) +def test_batch_create_repositories(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.batch_create_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.BatchCreateRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_create_repositories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + client.batch_create_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.BatchCreateRepositoriesRequest() + +@pytest.mark.asyncio +async def test_batch_create_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.BatchCreateRepositoriesRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.batch_create_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.BatchCreateRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_repositories_async_from_dict(): + await test_batch_create_repositories_async(request_type=dict) + + +def test_batch_create_repositories_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.BatchCreateRepositoriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.batch_create_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_create_repositories_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = repositories.BatchCreateRepositoriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.batch_create_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_batch_create_repositories_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_create_repositories( + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].requests + mock_val = [repositories.CreateRepositoryRequest(parent='parent_value')] + assert arg == mock_val + + +def test_batch_create_repositories_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_create_repositories( + repositories.BatchCreateRepositoriesRequest(), + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + +@pytest.mark.asyncio +async def test_batch_create_repositories_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_create_repositories( + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].requests + mock_val = [repositories.CreateRepositoryRequest(parent='parent_value')] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_batch_create_repositories_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_create_repositories( + repositories.BatchCreateRepositoriesRequest(), + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.GetRepositoryRequest, + dict, +]) +def test_get_repository(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.Repository( + name='name_value', + remote_uri='remote_uri_value', + etag='etag_value', + webhook_id='webhook_id_value', + ) + response = client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetRepositoryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.Repository) + assert response.name == 'name_value' + assert response.remote_uri == 'remote_uri_value' + assert response.etag == 'etag_value' + assert response.webhook_id == 'webhook_id_value' + + +def test_get_repository_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + client.get_repository() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetRepositoryRequest() + +@pytest.mark.asyncio +async def test_get_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.GetRepositoryRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository( + name='name_value', + remote_uri='remote_uri_value', + etag='etag_value', + webhook_id='webhook_id_value', + )) + response = await client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetRepositoryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.Repository) + assert response.name == 'name_value' + assert response.remote_uri == 'remote_uri_value' + assert response.etag == 'etag_value' + assert response.webhook_id == 'webhook_id_value' + + +@pytest.mark.asyncio +async def test_get_repository_async_from_dict(): + await test_get_repository_async(request_type=dict) + + +def test_get_repository_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.GetRepositoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + call.return_value = repositories.Repository() + client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_repository_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.GetRepositoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository()) + await client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_repository_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.Repository() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_repository( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_repository_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_repository( + repositories.GetRepositoryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_repository_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = repositories.Repository() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_repository( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_repository_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_repository( + repositories.GetRepositoryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.ListRepositoriesRequest, + dict, +]) +def test_list_repositories(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListRepositoriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRepositoriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_repositories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + client.list_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListRepositoriesRequest() + +@pytest.mark.asyncio +async def test_list_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.ListRepositoriesRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRepositoriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_repositories_async_from_dict(): + await test_list_repositories_async(request_type=dict) + + +def test_list_repositories_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.ListRepositoriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + call.return_value = repositories.ListRepositoriesResponse() + client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_repositories_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.ListRepositoriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse()) + await client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_repositories_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListRepositoriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_repositories( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_repositories_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_repositories( + repositories.ListRepositoriesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_repositories_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListRepositoriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_repositories( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_repositories_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_repositories( + repositories.ListRepositoriesRequest(), + parent='parent_value', + ) + + +def test_list_repositories_pager(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_repositories(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Repository) + for i in results) +def test_list_repositories_pages(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + pages = list(client.list_repositories(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_repositories_async_pager(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_repositories(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, repositories.Repository) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_repositories_async_pages(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_repositories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + repositories.DeleteRepositoryRequest, + dict, +]) +def test_delete_repository(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_repository(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteRepositoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_repository_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + client.delete_repository() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteRepositoryRequest() + +@pytest.mark.asyncio +async def test_delete_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.DeleteRepositoryRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteRepositoryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_repository_async_from_dict(): + await test_delete_repository_async(request_type=dict) + + +def test_delete_repository_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.DeleteRepositoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_repository_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.DeleteRepositoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_repository(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_repository_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_repository( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_repository_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_repository( + repositories.DeleteRepositoryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_repository_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_repository( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_repository_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_repository( + repositories.DeleteRepositoryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchReadWriteTokenRequest, + dict, +]) +def test_fetch_read_write_token(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadWriteTokenResponse( + token='token_value', + ) + response = client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadWriteTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchReadWriteTokenResponse) + assert response.token == 'token_value' + + +def test_fetch_read_write_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + client.fetch_read_write_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadWriteTokenRequest() + +@pytest.mark.asyncio +async def test_fetch_read_write_token_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchReadWriteTokenRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse( + token='token_value', + )) + response = await client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadWriteTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchReadWriteTokenResponse) + assert response.token == 'token_value' + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_async_from_dict(): + await test_fetch_read_write_token_async(request_type=dict) + + +def test_fetch_read_write_token_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchReadWriteTokenRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + call.return_value = repositories.FetchReadWriteTokenResponse() + client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchReadWriteTokenRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse()) + await client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +def test_fetch_read_write_token_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadWriteTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_read_write_token( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + + +def test_fetch_read_write_token_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_read_write_token( + repositories.FetchReadWriteTokenRequest(), + repository='repository_value', + ) + +@pytest.mark.asyncio +async def test_fetch_read_write_token_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadWriteTokenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_read_write_token( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_fetch_read_write_token_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.fetch_read_write_token( + repositories.FetchReadWriteTokenRequest(), + repository='repository_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchReadTokenRequest, + dict, +]) +def test_fetch_read_token(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadTokenResponse( + token='token_value', + ) + response = client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchReadTokenResponse) + assert response.token == 'token_value' + + +def test_fetch_read_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + client.fetch_read_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadTokenRequest() + +@pytest.mark.asyncio +async def test_fetch_read_token_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchReadTokenRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse( + token='token_value', + )) + response = await client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchReadTokenResponse) + assert response.token == 'token_value' + + +@pytest.mark.asyncio +async def test_fetch_read_token_async_from_dict(): + await test_fetch_read_token_async(request_type=dict) + + +def test_fetch_read_token_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchReadTokenRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + call.return_value = repositories.FetchReadTokenResponse() + client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_read_token_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchReadTokenRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse()) + await client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +def test_fetch_read_token_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = repositories.FetchReadTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_read_token( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + + +def test_fetch_read_token_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_read_token( + repositories.FetchReadTokenRequest(), + repository='repository_value', + ) + +@pytest.mark.asyncio +async def test_fetch_read_token_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadTokenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_read_token( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_fetch_read_token_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_read_token( + repositories.FetchReadTokenRequest(), + repository='repository_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchLinkableRepositoriesRequest, + dict, +]) +def test_fetch_linkable_repositories(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchLinkableRepositoriesResponse( + next_page_token='next_page_token_value', + ) + response = client.fetch_linkable_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchLinkableRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchLinkableRepositoriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_fetch_linkable_repositories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + client.fetch_linkable_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchLinkableRepositoriesRequest() + +@pytest.mark.asyncio +async def test_fetch_linkable_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchLinkableRepositoriesRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchLinkableRepositoriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.fetch_linkable_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchLinkableRepositoriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchLinkableRepositoriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_fetch_linkable_repositories_async_from_dict(): + await test_fetch_linkable_repositories_async(request_type=dict) + + +def test_fetch_linkable_repositories_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchLinkableRepositoriesRequest() + + request.connection = 'connection_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + call.return_value = repositories.FetchLinkableRepositoriesResponse() + client.fetch_linkable_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection=connection_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_linkable_repositories_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchLinkableRepositoriesRequest() + + request.connection = 'connection_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchLinkableRepositoriesResponse()) + await client.fetch_linkable_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection=connection_value', + ) in kw['metadata'] + + +def test_fetch_linkable_repositories_pager(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('connection', ''), + )), + ) + pager = client.fetch_linkable_repositories(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Repository) + for i in results) +def test_fetch_linkable_repositories_pages(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_linkable_repositories(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_fetch_linkable_repositories_async_pager(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_linkable_repositories(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, repositories.Repository) + for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_linkable_repositories_async_pages(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_linkable_repositories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + repositories.FetchGitRefsRequest, + dict, +]) +def test_fetch_git_refs(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchGitRefsResponse( + ref_names=['ref_names_value'], + ) + response = client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchGitRefsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchGitRefsResponse) + assert response.ref_names == ['ref_names_value'] + + +def test_fetch_git_refs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + client.fetch_git_refs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchGitRefsRequest() + +@pytest.mark.asyncio +async def test_fetch_git_refs_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchGitRefsRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse( + ref_names=['ref_names_value'], + )) + response = await client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchGitRefsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.FetchGitRefsResponse) + assert response.ref_names == ['ref_names_value'] + + +@pytest.mark.asyncio +async def test_fetch_git_refs_async_from_dict(): + await test_fetch_git_refs_async(request_type=dict) + + +def test_fetch_git_refs_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchGitRefsRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + call.return_value = repositories.FetchGitRefsResponse() + client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_git_refs_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchGitRefsRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse()) + await client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +def test_fetch_git_refs_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchGitRefsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_git_refs( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + + +def test_fetch_git_refs_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_git_refs( + repositories.FetchGitRefsRequest(), + repository='repository_value', + ) + +@pytest.mark.asyncio +async def test_fetch_git_refs_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = repositories.FetchGitRefsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_git_refs( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_fetch_git_refs_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_git_refs( + repositories.FetchGitRefsRequest(), + repository='repository_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.CreateConnectionRequest, + dict, +]) +def test_create_connection_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 
'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_connection_rest_required_fields(request_type=repositories.CreateConnectionRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["connection_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "connectionId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "connectionId" in jsonified_request + assert jsonified_request["connectionId"] == request_init["connection_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["connectionId"] = 'connection_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("connection_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "connectionId" in jsonified_request + assert jsonified_request["connectionId"] == 'connection_id_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_connection(request) + + expected_params = [ + ( + "connectionId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_connection_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("connectionId", )) & set(("parent", "connection", "connectionId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_connection_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_create_connection") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_create_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.CreateConnectionRequest.pb(repositories.CreateConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.CreateConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.CreateConnectionRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 
'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_connection(request) + + +def test_create_connection_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) + + +def test_create_connection_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_connection( + repositories.CreateConnectionRequest(), + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + + +def test_create_connection_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.GetConnectionRequest, + dict, +]) +def test_get_connection_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.Connection( + name='name_value', + disabled=True, + reconciling=True, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_connection(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.Connection) + assert response.name == 'name_value' + assert response.disabled is True + assert response.reconciling is True + assert response.etag == 'etag_value' + + +def test_get_connection_rest_required_fields(request_type=repositories.GetConnectionRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.Connection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_connection_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_connection_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_get_connection") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_get_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
repositories.GetConnectionRequest.pb(repositories.GetConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.Connection.to_json(repositories.Connection()) + + request = repositories.GetConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.Connection() + + client.get_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.GetConnectionRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_connection(request) + + +def test_get_connection_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = repositories.Connection() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_get_connection_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_connection( + repositories.GetConnectionRequest(), + name='name_value', + ) + + +def test_get_connection_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.ListConnectionsRequest, + dict, +]) +def test_list_connections_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.ListConnectionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_connections(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_connections_rest_required_fields(request_type=repositories.ListConnectionsRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.ListConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_connections(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_connections_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_connections._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_connections_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_list_connections") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_list_connections") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = repositories.ListConnectionsRequest.pb(repositories.ListConnectionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.ListConnectionsResponse.to_json(repositories.ListConnectionsResponse()) + + request = repositories.ListConnectionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.ListConnectionsResponse() + + client.list_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_connections_rest_bad_request(transport: str = 'rest', request_type=repositories.ListConnectionsRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_connections(request) + + +def test_list_connections_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = repositories.ListConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) + + +def test_list_connections_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + repositories.ListConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_connections_rest_pager(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(repositories.ListConnectionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Connection) + for i in results) + + pages = list(client.list_connections(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + repositories.UpdateConnectionRequest, + dict, +]) +def test_update_connection_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} + request_init["connection"] = {'name': 
'projects/sample1/locations/sample2/connections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_connection_rest_required_fields(request_type=repositories.UpdateConnectionRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_connection_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "updateMask", )) & set(("connection", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_connection_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_update_connection") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_update_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.UpdateConnectionRequest.pb(repositories.UpdateConnectionRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.UpdateConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.UpdateConnectionRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} + request_init["connection"] = {'name': 'projects/sample1/locations/sample2/connections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 
'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_connection(request) + + +def test_update_connection_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{connection.name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_update_connection_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_connection( + repositories.UpdateConnectionRequest(), + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_connection_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.DeleteConnectionRequest, + dict, +]) +def test_delete_connection_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_connection_rest_required_fields(request_type=repositories.DeleteConnectionRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_connection_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_connection_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_delete_connection") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_delete_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = repositories.DeleteConnectionRequest.pb(repositories.DeleteConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.DeleteConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.DeleteConnectionRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_connection(request) + + +def test_delete_connection_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_delete_connection_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_connection( + repositories.DeleteConnectionRequest(), + name='name_value', + ) + + +def test_delete_connection_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.CreateRepositoryRequest, + dict, +]) +def test_create_repository_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request_init["repository"] = {'name': 'name_value', 'remote_uri': 'remote_uri_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'annotations': {}, 'etag': 'etag_value', 'webhook_id': 'webhook_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_repository(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_repository_rest_required_fields(request_type=repositories.CreateRepositoryRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["repository_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "repositoryId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "repositoryId" in jsonified_request + assert jsonified_request["repositoryId"] == request_init["repository_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["repositoryId"] = 'repository_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_repository._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("repository_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "repositoryId" in jsonified_request + assert jsonified_request["repositoryId"] == 'repository_id_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_repository(request) + + expected_params = [ + ( + "repositoryId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_repository_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_repository._get_unset_required_fields({}) + assert set(unset_fields) == (set(("repositoryId", )) & set(("parent", "repository", "repositoryId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_repository_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_create_repository") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_create_repository") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.CreateRepositoryRequest.pb(repositories.CreateRepositoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.CreateRepositoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.CreateRepositoryRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request_init["repository"] = {'name': 'name_value', 'remote_uri': 'remote_uri_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'annotations': {}, 'etag': 'etag_value', 'webhook_id': 'webhook_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_repository(request) + + +def test_create_repository_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_repository(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories" % client.transport._host, args[1]) + + +def test_create_repository_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_repository( + repositories.CreateRepositoryRequest(), + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + + +def test_create_repository_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.BatchCreateRepositoriesRequest, + dict, +]) +def test_batch_create_repositories_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_create_repositories(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_batch_create_repositories_rest_required_fields(request_type=repositories.BatchCreateRepositoriesRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.batch_create_repositories(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_batch_create_repositories_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.batch_create_repositories._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "requests", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_repositories_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_batch_create_repositories") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_batch_create_repositories") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.BatchCreateRepositoriesRequest.pb(repositories.BatchCreateRepositoriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.BatchCreateRepositoriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_create_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.BatchCreateRepositoriesRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_repositories(request) + + +def test_batch_create_repositories_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.batch_create_repositories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories:batchCreate" % client.transport._host, args[1]) + + +def test_batch_create_repositories_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_create_repositories( + repositories.BatchCreateRepositoriesRequest(), + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + + +def test_batch_create_repositories_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.GetRepositoryRequest, + dict, +]) +def test_get_repository_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = repositories.Repository( + name='name_value', + remote_uri='remote_uri_value', + etag='etag_value', + webhook_id='webhook_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.Repository.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_repository(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.Repository) + assert response.name == 'name_value' + assert response.remote_uri == 'remote_uri_value' + assert response.etag == 'etag_value' + assert response.webhook_id == 'webhook_id_value' + + +def test_get_repository_rest_required_fields(request_type=repositories.GetRepositoryRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = 
RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.Repository() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.Repository.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_repository(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_repository_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_repository._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_repository_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_get_repository") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_get_repository") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.GetRepositoryRequest.pb(repositories.GetRepositoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.Repository.to_json(repositories.Repository()) + + request = repositories.GetRepositoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.Repository() + + client.get_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.GetRepositoryRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_repository(request) + + +def test_get_repository_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.Repository() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.Repository.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_repository(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*/repositories/*}" % client.transport._host, args[1]) + + +def test_get_repository_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_repository( + repositories.GetRepositoryRequest(), + name='name_value', + ) + + +def test_get_repository_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.ListRepositoriesRequest, + dict, +]) +def test_list_repositories_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.ListRepositoriesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_repositories(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRepositoriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_repositories_rest_required_fields(request_type=repositories.ListRepositoriesRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_repositories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.ListRepositoriesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_repositories(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_repositories_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_repositories._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_repositories_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_list_repositories") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_list_repositories") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.ListRepositoriesRequest.pb(repositories.ListRepositoriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.ListRepositoriesResponse.to_json(repositories.ListRepositoriesResponse()) + + request = repositories.ListRepositoriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.ListRepositoriesResponse() + + client.list_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.ListRepositoriesRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_repositories(request) + + +def test_list_repositories_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.ListRepositoriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_repositories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories" % client.transport._host, args[1]) + + +def test_list_repositories_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_repositories( + repositories.ListRepositoriesRequest(), + parent='parent_value', + ) + + +def test_list_repositories_rest_pager(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(repositories.ListRepositoriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + + pager = client.list_repositories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Repository) + for i in results) + + pages = list(client.list_repositories(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + repositories.DeleteRepositoryRequest, + dict, +]) +def test_delete_repository_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + 
+ # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_repository(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_repository_rest_required_fields(request_type=repositories.DeleteRepositoryRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_repository._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_repository(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_repository_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_repository._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_repository_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_delete_repository") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_delete_repository") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.DeleteRepositoryRequest.pb(repositories.DeleteRepositoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.DeleteRepositoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.DeleteRepositoryRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_repository(request) + + +def test_delete_repository_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_repository(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*/repositories/*}" % client.transport._host, args[1]) + + +def test_delete_repository_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_repository( + repositories.DeleteRepositoryRequest(), + name='name_value', + ) + + +def test_delete_repository_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchReadWriteTokenRequest, + dict, +]) +def test_fetch_read_write_token_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadWriteTokenResponse( + token='token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.fetch_read_write_token(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.FetchReadWriteTokenResponse) + assert response.token == 'token_value' + + +def test_fetch_read_write_token_rest_required_fields(request_type=repositories.FetchReadWriteTokenRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["repository"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_write_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["repository"] = 'repository_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_write_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "repository" in jsonified_request + assert jsonified_request["repository"] == 'repository_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadWriteTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.fetch_read_write_token(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_fetch_read_write_token_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.fetch_read_write_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("repository", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_read_write_token_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_read_write_token") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_read_write_token") as pre: 
+ pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.FetchReadWriteTokenRequest.pb(repositories.FetchReadWriteTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.FetchReadWriteTokenResponse.to_json(repositories.FetchReadWriteTokenResponse()) + + request = repositories.FetchReadWriteTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.FetchReadWriteTokenResponse() + + client.fetch_read_write_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_read_write_token_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchReadWriteTokenRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_read_write_token(request) + + +def test_fetch_read_write_token_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadWriteTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + repository='repository_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.fetch_read_write_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadWriteToken" % client.transport._host, args[1]) + + +def test_fetch_read_write_token_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_read_write_token( + repositories.FetchReadWriteTokenRequest(), + repository='repository_value', + ) + + +def test_fetch_read_write_token_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchReadTokenRequest, + dict, +]) +def test_fetch_read_token_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadTokenResponse( + token='token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.fetch_read_token(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.FetchReadTokenResponse) + assert response.token == 'token_value' + + +def test_fetch_read_token_rest_required_fields(request_type=repositories.FetchReadTokenRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["repository"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["repository"] = 'repository_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "repository" in jsonified_request + assert jsonified_request["repository"] == 'repository_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.fetch_read_token(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_fetch_read_token_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.fetch_read_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("repository", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_read_token_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_read_token") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_read_token") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = repositories.FetchReadTokenRequest.pb(repositories.FetchReadTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.FetchReadTokenResponse.to_json(repositories.FetchReadTokenResponse()) + + request = repositories.FetchReadTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.FetchReadTokenResponse() + + client.fetch_read_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_read_token_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchReadTokenRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_read_token(request) + + +def test_fetch_read_token_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + repository='repository_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.fetch_read_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadToken" % client.transport._host, args[1]) + + +def test_fetch_read_token_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_read_token( + repositories.FetchReadTokenRequest(), + repository='repository_value', + ) + + +def test_fetch_read_token_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchLinkableRepositoriesRequest, + dict, +]) +def test_fetch_linkable_repositories_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchLinkableRepositoriesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchLinkableRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.fetch_linkable_repositories(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchLinkableRepositoriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_fetch_linkable_repositories_rest_required_fields(request_type=repositories.FetchLinkableRepositoriesRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["connection"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_linkable_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["connection"] = 'connection_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_linkable_repositories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "connection" in jsonified_request + assert jsonified_request["connection"] == 'connection_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.FetchLinkableRepositoriesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.FetchLinkableRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.fetch_linkable_repositories(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_fetch_linkable_repositories_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.fetch_linkable_repositories._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("connection", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_linkable_repositories_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_linkable_repositories") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_linkable_repositories") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.FetchLinkableRepositoriesRequest.pb(repositories.FetchLinkableRepositoriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.FetchLinkableRepositoriesResponse.to_json(repositories.FetchLinkableRepositoriesResponse()) + + request = repositories.FetchLinkableRepositoriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.FetchLinkableRepositoriesResponse() + + client.fetch_linkable_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_linkable_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchLinkableRepositoriesRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_linkable_repositories(request) + + +def test_fetch_linkable_repositories_rest_pager(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(repositories.FetchLinkableRepositoriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} + + pager = 
client.fetch_linkable_repositories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Repository) + for i in results) + + pages = list(client.fetch_linkable_repositories(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchGitRefsRequest, + dict, +]) +def test_fetch_git_refs_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchGitRefsResponse( + ref_names=['ref_names_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.fetch_git_refs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.FetchGitRefsResponse) + assert response.ref_names == ['ref_names_value'] + + +def test_fetch_git_refs_rest_required_fields(request_type=repositories.FetchGitRefsRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["repository"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_git_refs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["repository"] = 'repository_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_git_refs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("ref_type", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "repository" in jsonified_request + assert jsonified_request["repository"] == 'repository_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.FetchGitRefsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.fetch_git_refs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_fetch_git_refs_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.fetch_git_refs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("refType", )) & set(("repository", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_git_refs_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_git_refs") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_git_refs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
repositories.FetchGitRefsRequest.pb(repositories.FetchGitRefsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.FetchGitRefsResponse.to_json(repositories.FetchGitRefsResponse()) + + request = repositories.FetchGitRefsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.FetchGitRefsResponse() + + client.fetch_git_refs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_git_refs_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchGitRefsRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_git_refs(request) + + +def test_fetch_git_refs_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = repositories.FetchGitRefsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + repository='repository_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.fetch_git_refs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:fetchGitRefs" % client.transport._host, args[1]) + + +def test_fetch_git_refs_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_git_refs( + repositories.FetchGitRefsRequest(), + repository='repository_value', + ) + + +def test_fetch_git_refs_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RepositoryManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RepositoryManagerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RepositoryManagerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RepositoryManagerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RepositoryManagerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.RepositoryManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.RepositoryManagerGrpcTransport, + transports.RepositoryManagerGrpcAsyncIOTransport, + transports.RepositoryManagerRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = RepositoryManagerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.RepositoryManagerGrpcTransport, + ) + +def test_repository_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RepositoryManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_repository_manager_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RepositoryManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_connection', + 'get_connection', + 'list_connections', + 'update_connection', + 'delete_connection', + 'create_repository', + 'batch_create_repositories', + 'get_repository', + 'list_repositories', + 'delete_repository', + 'fetch_read_write_token', + 'fetch_read_token', + 'fetch_linkable_repositories', + 'fetch_git_refs', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_operation', + 'cancel_operation', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_repository_manager_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RepositoryManagerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + 
load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_repository_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RepositoryManagerTransport() + adc.assert_called_once() + + +def test_repository_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RepositoryManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RepositoryManagerGrpcTransport, + transports.RepositoryManagerGrpcAsyncIOTransport, + ], +) +def test_repository_manager_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RepositoryManagerGrpcTransport, + transports.RepositoryManagerGrpcAsyncIOTransport, + transports.RepositoryManagerRestTransport, + ], +) +def test_repository_manager_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.RepositoryManagerGrpcTransport, grpc_helpers), + (transports.RepositoryManagerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_repository_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudbuild.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudbuild.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) +def test_repository_manager_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): +        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: +            transport_class( +                credentials=cred, +                client_cert_source_for_mtls=client_cert_source_callback +            ) +            expected_cert, expected_key = client_cert_source_callback() +            mock_ssl_cred.assert_called_once_with( +                certificate_chain=expected_cert, +                private_key=expected_key +            ) + +def test_repository_manager_http_transport_client_cert_source_for_mtls(): +    cred = ga_credentials.AnonymousCredentials() +    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: +        transports.RepositoryManagerRestTransport ( +            credentials=cred, +            client_cert_source_for_mtls=client_cert_source_callback +        ) +        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_repository_manager_rest_lro_client(): +    client = RepositoryManagerClient( +        credentials=ga_credentials.AnonymousCredentials(), +        transport='rest', +    ) +    transport = client.transport + +    # Ensure that we have an api-core operations client. +    assert isinstance( +        transport.operations_client, +        operations_v1.AbstractOperationsClient, +    ) + +    # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_repository_manager_host_no_port(transport_name): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudbuild.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_repository_manager_host_with_port(transport_name): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudbuild.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudbuild.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_repository_manager_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RepositoryManagerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RepositoryManagerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_connection._session + session2 = client2.transport.create_connection._session + assert session1 != session2 + session1 = client1.transport.get_connection._session + session2 = client2.transport.get_connection._session + assert session1 != session2 + session1 = client1.transport.list_connections._session + session2 = client2.transport.list_connections._session + assert session1 != session2 + session1 = 
client1.transport.update_connection._session + session2 = client2.transport.update_connection._session + assert session1 != session2 + session1 = client1.transport.delete_connection._session + session2 = client2.transport.delete_connection._session + assert session1 != session2 + session1 = client1.transport.create_repository._session + session2 = client2.transport.create_repository._session + assert session1 != session2 + session1 = client1.transport.batch_create_repositories._session + session2 = client2.transport.batch_create_repositories._session + assert session1 != session2 + session1 = client1.transport.get_repository._session + session2 = client2.transport.get_repository._session + assert session1 != session2 + session1 = client1.transport.list_repositories._session + session2 = client2.transport.list_repositories._session + assert session1 != session2 + session1 = client1.transport.delete_repository._session + session2 = client2.transport.delete_repository._session + assert session1 != session2 + session1 = client1.transport.fetch_read_write_token._session + session2 = client2.transport.fetch_read_write_token._session + assert session1 != session2 + session1 = client1.transport.fetch_read_token._session + session2 = client2.transport.fetch_read_token._session + assert session1 != session2 + session1 = client1.transport.fetch_linkable_repositories._session + session2 = client2.transport.fetch_linkable_repositories._session + assert session1 != session2 + session1 = client1.transport.fetch_git_refs._session + session2 = client2.transport.fetch_git_refs._session + assert session1 != session2 +def test_repository_manager_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.RepositoryManagerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_repository_manager_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.RepositoryManagerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) +def test_repository_manager_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + 
"mtls.squid.clam.whelk:443", +                    credentials=cred, +                    credentials_file=None, +                    scopes=None, +                    ssl_credentials=mock_ssl_cred, +                    quota_project_id=None, +                    options=[ +                        ("grpc.max_send_message_length", -1), +                        ("grpc.max_receive_message_length", -1), +                    ], +                ) +                assert transport.grpc_channel == mock_grpc_channel +                assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) +def test_repository_manager_transport_channel_mtls_with_adc( +    transport_class +): +    mock_ssl_cred = mock.Mock() +    with mock.patch.multiple( +        "google.auth.transport.grpc.SslCredentials", +        __init__=mock.Mock(return_value=None), +        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), +    ): +        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: +            mock_grpc_channel = mock.Mock() +            grpc_create_channel.return_value = mock_grpc_channel +            mock_cred = mock.Mock() + +            with pytest.warns(DeprecationWarning): +                transport = transport_class( +                    host="squid.clam.whelk", +                    credentials=mock_cred, +                    api_mtls_endpoint="mtls.squid.clam.whelk", +                    client_cert_source=None, +                ) + +            grpc_create_channel.assert_called_once_with( +                "mtls.squid.clam.whelk:443", +                credentials=mock_cred, +                credentials_file=None, +                scopes=None, +                ssl_credentials=mock_ssl_cred, +                quota_project_id=None, +                options=[ +                    ("grpc.max_send_message_length", -1), +                    ("grpc.max_receive_message_length", -1), +                ], +            ) +            assert transport.grpc_channel == mock_grpc_channel + + +def test_repository_manager_grpc_lro_client(): +    client = RepositoryManagerClient( +        credentials=ga_credentials.AnonymousCredentials(), +        transport='grpc', +    ) +    transport = client.transport + +    # Ensure that we have an api-core operations client.
+    assert isinstance( +        transport.operations_client, +        operations_v1.OperationsClient, +    ) + +    # Ensure that subsequent calls to the property send the exact same object. +    assert transport.operations_client is transport.operations_client + + +def test_repository_manager_grpc_lro_async_client(): +    client = RepositoryManagerAsyncClient( +        credentials=ga_credentials.AnonymousCredentials(), +        transport='grpc_asyncio', +    ) +    transport = client.transport + +    # Ensure that we have an api-core operations client. +    assert isinstance( +        transport.operations_client, +        operations_v1.OperationsAsyncClient, +    ) + +    # Ensure that subsequent calls to the property send the exact same object. +    assert transport.operations_client is transport.operations_client + + +def test_connection_path(): +    project = "squid" +    location = "clam" +    connection = "whelk" +    expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) +    actual = RepositoryManagerClient.connection_path(project, location, connection) +    assert expected == actual + + +def test_parse_connection_path(): +    expected = { +        "project": "octopus", +        "location": "oyster", +        "connection": "nudibranch", +    } +    path = RepositoryManagerClient.connection_path(**expected) + +    # Check that the path construction is reversible.
+ actual = RepositoryManagerClient.parse_connection_path(path) + assert expected == actual + +def test_repository_path(): + project = "cuttlefish" + location = "mussel" + connection = "winkle" + repository = "nautilus" + expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) + actual = RepositoryManagerClient.repository_path(project, location, connection, repository) + assert expected == actual + + +def test_parse_repository_path(): + expected = { + "project": "scallop", + "location": "abalone", + "connection": "squid", + "repository": "clam", + } + path = RepositoryManagerClient.repository_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_repository_path(path) + assert expected == actual + +def test_secret_version_path(): + project = "whelk" + secret = "octopus" + version = "oyster" + expected = "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) + actual = RepositoryManagerClient.secret_version_path(project, secret, version) + assert expected == actual + + +def test_parse_secret_version_path(): + expected = { + "project": "nudibranch", + "secret": "cuttlefish", + "version": "mussel", + } + path = RepositoryManagerClient.secret_version_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RepositoryManagerClient.parse_secret_version_path(path) + assert expected == actual + +def test_service_path(): + project = "winkle" + location = "nautilus" + namespace = "scallop" + service = "abalone" + expected = "projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format(project=project, location=location, namespace=namespace, service=service, ) + actual = RepositoryManagerClient.service_path(project, location, namespace, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "squid", + "location": "clam", + "namespace": "whelk", + "service": "octopus", + } + path = RepositoryManagerClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_service_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RepositoryManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = RepositoryManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = RepositoryManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = RepositoryManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RepositoryManagerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RepositoryManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = RepositoryManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = RepositoryManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = RepositoryManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RepositoryManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = RepositoryManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RepositoryManagerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RepositoryManagerTransport, '_prep_wrapped_messages') as prep: + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RepositoryManagerTransport, '_prep_wrapped_messages') as prep: + transport_class = RepositoryManagerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + +def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_cancel_operation(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = RepositoryManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        # Wrap it in an awaitable fake gRPC call for the async client.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(version=774, etag=b"etag_blob",)
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_set_iam_policy_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + +def test_get_iam_policy(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +def test_test_iam_permissions(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 57f51e98d170366dae5572baf29c391533789678 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 11 Jul 2023 23:51:34 +0000 Subject: [PATCH 2/4] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../services/cloud_build/async_client.py | 11 +- .../services/cloud_build/client.py | 337 +- .../services/cloud_build/transports/grpc.py | 8 +- .../cloud_build/transports/grpc_asyncio.py | 8 +- .../cloudbuild_v1/types/cloudbuild.py | 90 +- owl-bot-staging/v1/.coveragerc | 13 - owl-bot-staging/v1/.flake8 | 33 - owl-bot-staging/v1/MANIFEST.in | 2 - owl-bot-staging/v1/README.rst | 49 - .../v1/docs/cloudbuild_v1/cloud_build.rst | 10 - 
.../v1/docs/cloudbuild_v1/services.rst | 6 - .../v1/docs/cloudbuild_v1/types.rst | 6 - owl-bot-staging/v1/docs/conf.py | 376 - owl-bot-staging/v1/docs/index.rst | 7 - .../cloud/devtools/cloudbuild/__init__.py | 151 - .../devtools/cloudbuild/gapic_version.py | 16 - .../google/cloud/devtools/cloudbuild/py.typed | 2 - .../cloud/devtools/cloudbuild_v1/__init__.py | 152 - .../cloudbuild_v1/gapic_metadata.json | 298 - .../devtools/cloudbuild_v1/gapic_version.py | 16 - .../cloud/devtools/cloudbuild_v1/py.typed | 2 - .../cloudbuild_v1/services/__init__.py | 15 - .../services/cloud_build/__init__.py | 22 - .../services/cloud_build/async_client.py | 2601 ---- .../services/cloud_build/client.py | 2899 ----- .../services/cloud_build/pagers.py | 381 - .../cloud_build/transports/__init__.py | 38 - .../services/cloud_build/transports/base.py | 443 - .../services/cloud_build/transports/grpc.py | 793 -- .../cloud_build/transports/grpc_asyncio.py | 792 -- .../services/cloud_build/transports/rest.py | 2419 ---- .../devtools/cloudbuild_v1/types/__init__.py | 144 - .../cloudbuild_v1/types/cloudbuild.py | 3680 ------ owl-bot-staging/v1/mypy.ini | 3 - owl-bot-staging/v1/noxfile.py | 184 - ...nerated_cloud_build_approve_build_async.py | 56 - ...enerated_cloud_build_approve_build_sync.py | 56 - ...enerated_cloud_build_cancel_build_async.py | 53 - ...generated_cloud_build_cancel_build_sync.py | 53 - ...enerated_cloud_build_create_build_async.py | 56 - ...generated_cloud_build_create_build_sync.py | 56 - ..._cloud_build_create_build_trigger_async.py | 56 - ...d_cloud_build_create_build_trigger_sync.py | 56 - ...ed_cloud_build_create_worker_pool_async.py | 57 - ...ted_cloud_build_create_worker_pool_sync.py | 57 - ..._cloud_build_delete_build_trigger_async.py | 51 - ...d_cloud_build_delete_build_trigger_sync.py | 51 - ...ed_cloud_build_delete_worker_pool_async.py | 56 - ...ted_cloud_build_delete_worker_pool_sync.py | 56 - ...1_generated_cloud_build_get_build_async.py | 53 - 
...v1_generated_cloud_build_get_build_sync.py | 53 - ...ted_cloud_build_get_build_trigger_async.py | 53 - ...ated_cloud_build_get_build_trigger_sync.py | 53 - ...rated_cloud_build_get_worker_pool_async.py | 52 - ...erated_cloud_build_get_worker_pool_sync.py | 52 - ...d_cloud_build_list_build_triggers_async.py | 53 - ...ed_cloud_build_list_build_triggers_sync.py | 53 - ...generated_cloud_build_list_builds_async.py | 53 - ..._generated_cloud_build_list_builds_sync.py | 53 - ...ted_cloud_build_list_worker_pools_async.py | 53 - ...ated_cloud_build_list_worker_pools_sync.py | 53 - ...oud_build_receive_trigger_webhook_async.py | 51 - ...loud_build_receive_trigger_webhook_sync.py | 51 - ...generated_cloud_build_retry_build_async.py | 57 - ..._generated_cloud_build_retry_build_sync.py | 57 - ...ted_cloud_build_run_build_trigger_async.py | 57 - ...ated_cloud_build_run_build_trigger_sync.py | 57 - ..._cloud_build_update_build_trigger_async.py | 57 - ...d_cloud_build_update_build_trigger_sync.py | 57 - ...ed_cloud_build_update_worker_pool_async.py | 55 - ...ted_cloud_build_update_worker_pool_sync.py | 55 - ...etadata_google.devtools.cloudbuild.v1.json | 3027 ----- .../scripts/fixup_cloudbuild_v1_keywords.py | 193 - owl-bot-staging/v1/setup.py | 90 - .../v1/testing/constraints-3.10.txt | 6 - .../v1/testing/constraints-3.11.txt | 6 - .../v1/testing/constraints-3.12.txt | 6 - .../v1/testing/constraints-3.7.txt | 9 - .../v1/testing/constraints-3.8.txt | 6 - .../v1/testing/constraints-3.9.txt | 6 - owl-bot-staging/v1/tests/__init__.py | 16 - owl-bot-staging/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/cloudbuild_v1/__init__.py | 16 - .../gapic/cloudbuild_v1/test_cloud_build.py | 10282 ---------------- owl-bot-staging/v2/.coveragerc | 13 - owl-bot-staging/v2/.flake8 | 33 - owl-bot-staging/v2/MANIFEST.in | 2 - owl-bot-staging/v2/README.rst | 49 - .../docs/cloudbuild_v2/repository_manager.rst | 10 - .../v2/docs/cloudbuild_v2/services.rst 
| 6 - .../v2/docs/cloudbuild_v2/types.rst | 6 - owl-bot-staging/v2/docs/conf.py | 376 - owl-bot-staging/v2/docs/index.rst | 7 - .../cloud/devtools/cloudbuild/__init__.py | 93 - .../devtools/cloudbuild/gapic_version.py | 16 - .../google/cloud/devtools/cloudbuild/py.typed | 2 - .../cloud/devtools/cloudbuild_v2/__init__.py | 94 - .../cloudbuild_v2/gapic_metadata.json | 238 - .../devtools/cloudbuild_v2/gapic_version.py | 16 - .../cloud/devtools/cloudbuild_v2/py.typed | 2 - .../cloudbuild_v2/services/__init__.py | 15 - .../services/repository_manager/__init__.py | 22 - .../repository_manager/async_client.py | 2257 ---- .../services/repository_manager/client.py | 2445 ---- .../services/repository_manager/pagers.py | 381 - .../repository_manager/transports/__init__.py | 38 - .../repository_manager/transports/base.py | 431 - .../repository_manager/transports/grpc.py | 743 -- .../transports/grpc_asyncio.py | 742 -- .../repository_manager/transports/rest.py | 2275 ---- .../devtools/cloudbuild_v2/types/__init__.py | 88 - .../cloudbuild_v2/types/cloudbuild.py | 159 - .../cloudbuild_v2/types/repositories.py | 1104 -- owl-bot-staging/v2/mypy.ini | 3 - owl-bot-staging/v2/noxfile.py | 184 - ...manager_batch_create_repositories_async.py | 62 - ..._manager_batch_create_repositories_sync.py | 62 - ...ository_manager_create_connection_async.py | 57 - ...pository_manager_create_connection_sync.py | 57 - ...ository_manager_create_repository_async.py | 61 - ...pository_manager_create_repository_sync.py | 61 - ...ository_manager_delete_connection_async.py | 56 - ...pository_manager_delete_connection_sync.py | 56 - ...ository_manager_delete_repository_async.py | 56 - ...pository_manager_delete_repository_sync.py | 56 - ...repository_manager_fetch_git_refs_async.py | 52 - ..._repository_manager_fetch_git_refs_sync.py | 52 - ...nager_fetch_linkable_repositories_async.py | 53 - ...anager_fetch_linkable_repositories_sync.py | 53 - ...pository_manager_fetch_read_token_async.py | 52 - 
...epository_manager_fetch_read_token_sync.py | 52 - ...ry_manager_fetch_read_write_token_async.py | 52 - ...ory_manager_fetch_read_write_token_sync.py | 52 - ...repository_manager_get_connection_async.py | 52 - ..._repository_manager_get_connection_sync.py | 52 - ...repository_manager_get_repository_async.py | 52 - ..._repository_manager_get_repository_sync.py | 52 - ...pository_manager_list_connections_async.py | 53 - ...epository_manager_list_connections_sync.py | 53 - ...ository_manager_list_repositories_async.py | 53 - ...pository_manager_list_repositories_sync.py | 53 - ...ository_manager_update_connection_async.py | 55 - ...pository_manager_update_connection_sync.py | 55 - ...etadata_google.devtools.cloudbuild.v2.json | 2309 ---- .../scripts/fixup_cloudbuild_v2_keywords.py | 189 - owl-bot-staging/v2/setup.py | 91 - .../v2/testing/constraints-3.10.txt | 7 - .../v2/testing/constraints-3.11.txt | 7 - .../v2/testing/constraints-3.12.txt | 7 - .../v2/testing/constraints-3.7.txt | 10 - .../v2/testing/constraints-3.8.txt | 7 - .../v2/testing/constraints-3.9.txt | 7 - owl-bot-staging/v2/tests/__init__.py | 16 - owl-bot-staging/v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/cloudbuild_v2/__init__.py | 16 - .../cloudbuild_v2/test_repository_manager.py | 9596 -------------- .../gapic/cloudbuild_v1/test_cloud_build.py | 971 +- 159 files changed, 471 insertions(+), 57836 deletions(-) delete mode 100644 owl-bot-staging/v1/.coveragerc delete mode 100644 owl-bot-staging/v1/.flake8 delete mode 100644 owl-bot-staging/v1/MANIFEST.in delete mode 100644 owl-bot-staging/v1/README.rst delete mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst delete mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/services.rst delete mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/types.rst delete mode 100644 owl-bot-staging/v1/docs/conf.py delete mode 100644 owl-bot-staging/v1/docs/index.rst delete mode 100644 
owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py delete mode 100644 owl-bot-staging/v1/mypy.ini delete mode 100644 owl-bot-staging/v1/noxfile.py delete mode 100644 
owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py delete mode 100644 
owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py delete 
mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json delete mode 100644 owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py delete mode 100644 owl-bot-staging/v1/setup.py delete mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1/tests/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py delete mode 100644 owl-bot-staging/v2/.coveragerc delete mode 100644 owl-bot-staging/v2/.flake8 delete mode 100644 owl-bot-staging/v2/MANIFEST.in delete mode 100644 owl-bot-staging/v2/README.rst delete mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst delete mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/services.rst delete mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/types.rst delete mode 100644 owl-bot-staging/v2/docs/conf.py delete mode 100644 
owl-bot-staging/v2/docs/index.rst delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py delete 
mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py delete mode 100644 owl-bot-staging/v2/mypy.ini delete mode 100644 owl-bot-staging/v2/noxfile.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py delete mode 
100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json delete mode 100644 owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py delete mode 100644 owl-bot-staging/v2/setup.py delete mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v2/tests/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index f1073ed3..953343d6 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -842,7 +842,7 @@ async def retry_build( For builds that specify ``StorageSource``: - - If the original build pulled source from Google Cloud Storage + - If the original build pulled source from Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. @@ -1778,6 +1778,12 @@ async def run_build_trigger( ) -> operation_async.AsyncOperation: r"""Runs a ``BuildTrigger`` at a particular source revision. + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. 
+ v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1825,6 +1831,9 @@ async def sample_run_build_trigger(): should not be set. source (:class:`google.cloud.devtools.cloudbuild_v1.types.RepoSource`): Source to build against this trigger. + Branch and tag names cannot consist of + regular expressions. + This corresponds to the ``source`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 507f2600..8870b9b6 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -208,18 +208,23 @@ def parse_build_path(path: str) -> Dict[str, str]: @staticmethod def build_trigger_path( project: str, + location: str, trigger: str, ) -> str: """Returns a fully-qualified build_trigger string.""" - return "projects/{project}/triggers/{trigger}".format( + return "projects/{project}/locations/{location}/triggers/{trigger}".format( project=project, + location=location, trigger=trigger, ) @staticmethod def parse_build_trigger_path(path: str) -> Dict[str, str]: """Parses a build_trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/triggers/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/triggers/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod @@ -759,13 +764,19 @@ def sample_create_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_build] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -903,16 +914,19 @@ def sample_get_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_build] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("id", request.id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1022,13 +1036,19 @@ def sample_list_builds(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_builds] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1163,16 +1183,19 @@ def sample_cancel_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.cancel_build] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("id", request.id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1216,7 +1239,7 @@ def retry_build( For builds that specify ``StorageSource``: - - If the original build pulled source from Google Cloud Storage + - If the original build pulled source from Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. @@ -1337,16 +1360,19 @@ def sample_retry_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.retry_build] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("id", request.id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1494,11 +1520,19 @@ def sample_approve_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.approve_build] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1618,13 +1652,19 @@ def sample_create_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1735,16 +1775,19 @@ def sample_get_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1846,13 +1889,19 @@ def sample_list_build_triggers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1960,16 +2009,19 @@ def sample_delete_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. rpc( @@ -2087,16 +2139,19 @@ def sample_update_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$" ) + regex_match = routing_param_regex.match(request.trigger.resource_name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2122,6 +2177,12 @@ def run_build_trigger( ) -> operation.Operation: r"""Runs a ``BuildTrigger`` at a particular source revision. + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -2169,6 +2230,9 @@ def sample_run_build_trigger(): should not be set. source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): Source to build against this trigger. + Branch and tag names cannot consist of + regular expressions. + This corresponds to the ``source`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2239,16 +2303,19 @@ def sample_run_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2485,11 +2552,19 @@ def sample_create_worker_pool(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2608,11 +2683,19 @@ def sample_get_worker_pool(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2724,11 +2807,19 @@ def sample_delete_worker_pool(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_worker_pool] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2866,13 +2957,19 @@ def sample_update_worker_pool(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("worker_pool.name", request.worker_pool.name),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$" ) + regex_match = routing_param_regex.match(request.worker_pool.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2981,11 +3078,19 @@ def sample_list_worker_pools(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. 
response = rpc( diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py index fd412cd1..4fb6ebea 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py @@ -393,7 +393,7 @@ def retry_build( For builds that specify ``StorageSource``: - - If the original build pulled source from Google Cloud Storage + - If the original build pulled source from Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. @@ -601,6 +601,12 @@ def run_build_trigger( Runs a ``BuildTrigger`` at a particular source revision. + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + Returns: Callable[[~.RunBuildTriggerRequest], ~.Operation]: diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py index 9ad56231..dadb5934 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -402,7 +402,7 @@ def retry_build( For builds that specify ``StorageSource``: - - If the original build pulled source from Google Cloud Storage + - If the original build pulled source from Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. 
@@ -621,6 +621,12 @@ def run_build_trigger( Runs a ``BuildTrigger`` at a particular source revision. + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + Returns: Callable[[~.RunBuildTriggerRequest], Awaitable[~.Operation]]: diff --git a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index 93d01996..d073dba8 100644 --- a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -134,6 +134,8 @@ class RunBuildTriggerRequest(proto.Message): Required. ID of the trigger. source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): Source to build against this trigger. + Branch and tag names cannot consist of regular + expressions. """ name: str = proto.Field( @@ -156,23 +158,21 @@ class RunBuildTriggerRequest(proto.Message): class StorageSource(proto.Message): - r"""Location of the source in an archive file in Google Cloud - Storage. + r"""Location of the source in an archive file in Cloud Storage. Attributes: bucket (str): - Google Cloud Storage bucket containing the source (see - `Bucket Name + Cloud Storage bucket containing the source (see `Bucket Name Requirements `__). object_ (str): - Google Cloud Storage object containing the source. + Cloud Storage object containing the source. - This object must be a gzipped archive file (``.tar.gz``) - containing source to build. + This object must be a zipped (``.zip``) or gzipped archive + file (``.tar.gz``) containing source to build. generation (int): - Google Cloud Storage generation for the - object. If the generation is omitted, the latest - generation will be used. + Cloud Storage generation for the object. 
If + the generation is omitted, the latest generation + will be used. """ bucket: str = proto.Field( @@ -321,23 +321,23 @@ class RepoSource(proto.Message): class StorageSourceManifest(proto.Message): - r"""Location of the source manifest in Google Cloud Storage. This - feature is in Preview; see description + r"""Location of the source manifest in Cloud Storage. This feature is in + Preview; see description `here `__. Attributes: bucket (str): - Google Cloud Storage bucket containing the source manifest - (see `Bucket Name + Cloud Storage bucket containing the source manifest (see + `Bucket Name Requirements `__). object_ (str): - Google Cloud Storage object containing the - source manifest. + Cloud Storage object containing the source + manifest. This object must be a JSON file. generation (int): - Google Cloud Storage generation for the - object. If the generation is omitted, the latest - generation will be used. + Cloud Storage generation for the object. If + the generation is omitted, the latest generation + will be used. """ bucket: str = proto.Field( @@ -367,7 +367,7 @@ class Source(proto.Message): Attributes: storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource): If provided, get the source from this - location in Google Cloud Storage. + location in Cloud Storage. This field is a member of `oneof`_ ``source``. repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): @@ -381,8 +381,8 @@ class Source(proto.Message): This field is a member of `oneof`_ ``source``. storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest): - If provided, get the source from this manifest in Google - Cloud Storage. This feature is in Preview; see description + If provided, get the source from this manifest in Cloud + Storage. This feature is in Preview; see description `here `__. This field is a member of `oneof`_ ``source``. 
@@ -842,8 +842,8 @@ class ArtifactResult(proto.Message): Attributes: location (str): - The path of an artifact in a Google Cloud Storage bucket, - with the generation number. For example, + The path of an artifact in a Cloud Storage bucket, with the + generation number. For example, ``gs://mybucket/path/to/output.jar#generation``. file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.FileHashes]): The file hash of the artifact. @@ -947,8 +947,8 @@ class Build(proto.Message): be uploaded upon successful completion of all build steps. logs_bucket (str): - Google Cloud Storage bucket where logs should be written - (see `Bucket Name + Cloud Storage bucket where logs should be written (see + `Bucket Name Requirements `__). Logs file names will be of the format ``${logs_bucket}/log-${build_id}.txt``. @@ -2368,7 +2368,6 @@ class RepositoryType(proto.Enum): class GitHubEventsConfig(proto.Message): r"""GitHubEventsConfig describes the configuration of a trigger that creates a build whenever a GitHub event is received. - This message is experimental. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -2859,7 +2858,7 @@ class BuildOptions(proto.Message): configuration file. log_streaming_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LogStreamingOption): Option to define build log streaming behavior - to Google Cloud Storage. + to Cloud Storage. worker_pool (str): This field deprecated; please use ``pool.name`` instead. pool (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.PoolOption): @@ -2904,8 +2903,18 @@ class BuildOptions(proto.Message): """ class VerifyOption(proto.Enum): - r"""Specifies the manner in which the build should be verified, - if at all. + r"""Specifies the manner in which the build should be verified, if at + all. 
+ + If a verified build is requested, and any part of the process to + generate and upload provenance fails, the build will also fail. + + If the build does not request verification then that process may + occur, but is not guaranteed to. If it does occur and fails, the + build will not fail. + + For more information, see `Viewing Build + Provenance `__. Values: NOT_VERIFIED (0): @@ -2959,20 +2968,20 @@ class SubstitutionOption(proto.Enum): ALLOW_LOOSE = 1 class LogStreamingOption(proto.Enum): - r"""Specifies the behavior when writing build logs to Google - Cloud Storage. + r"""Specifies the behavior when writing build logs to Cloud + Storage. Values: STREAM_DEFAULT (0): Service may automatically determine build log streaming behavior. STREAM_ON (1): - Build logs should be streamed to Google Cloud + Build logs should be streamed to Cloud Storage. STREAM_OFF (2): - Build logs should not be streamed to Google - Cloud Storage; they will be written when the - build is completed. + Build logs should not be streamed to Cloud + Storage; they will be written when the build is + completed. """ STREAM_DEFAULT = 0 STREAM_ON = 1 @@ -3238,12 +3247,15 @@ class State(proto.Enum): draining workers. DELETED (4): ``WorkerPool`` is deleted. + UPDATING (5): + ``WorkerPool`` is being updated; new builds cannot be run. """ STATE_UNSPECIFIED = 0 CREATING = 1 RUNNING = 2 DELETING = 3 DELETED = 4 + UPDATING = 5 name: str = proto.Field( proto.STRING, @@ -3468,9 +3480,9 @@ class DeleteWorkerPoolRequest(proto.Message): Required. The name of the ``WorkerPool`` to delete. Format: ``projects/{project}/locations/{location}/workerPools/{workerPool}``. etag (str): - Optional. If this is provided, it must match - the server's etag on the workerpool for the - request to be processed. + Optional. If provided, it must match the + server's etag on the workerpool for the request + to be processed. 
allow_missing (bool): If set to true, and the ``WorkerPool`` is not found, the request will succeed but no action will be taken on the diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index a0cf72db..00000000 --- a/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/devtools/cloudbuild/__init__.py - google/cloud/devtools/cloudbuild/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index af14cd40..00000000 --- a/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/devtools/cloudbuild *.py -recursive-include google/cloud/devtools/cloudbuild_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst deleted file mode 100644 index c788a1b3..00000000 --- a/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Devtools Cloudbuild API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Devtools Cloudbuild API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst deleted file mode 100644 index be81dc5c..00000000 --- a/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst +++ /dev/null @@ -1,10 +0,0 @@ -CloudBuild ----------------------------- - -.. automodule:: google.cloud.devtools.cloudbuild_v1.services.cloud_build - :members: - :inherited-members: - -.. automodule:: google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst deleted file mode 100644 index c0bdc88d..00000000 --- a/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Devtools Cloudbuild v1 API -==================================================== -.. toctree:: - :maxdepth: 2 - - cloud_build diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst deleted file mode 100644 index 0e955742..00000000 --- a/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Devtools Cloudbuild v1 API -================================================= - -.. automodule:: google.cloud.devtools.cloudbuild_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py deleted file mode 100644 index 4bd8e2dd..00000000 --- a/owl-bot-staging/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-build documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-build" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). 
-# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Devtools Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. 
-# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. 
-# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-build-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-build.tex", - u"google-cloud-build Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. 
-# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-build", - u"Google Cloud Devtools Cloudbuild Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-build", - u"google-cloud-build Documentation", - author, - "google-cloud-build", - "GAPIC library for Google Cloud Devtools Cloudbuild API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 37eed237..00000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - cloudbuild_v1/services - cloudbuild_v1/types diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py deleted file mode 100644 index 06ff95e8..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.devtools.cloudbuild import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.devtools.cloudbuild_v1.services.cloud_build.client import CloudBuildClient -from google.cloud.devtools.cloudbuild_v1.services.cloud_build.async_client import CloudBuildAsyncClient - -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApprovalConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApprovalResult -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApproveBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ArtifactResult -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Artifacts -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Build -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildApproval -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildOperationMetadata -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildOptions -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildStep -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildTrigger -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuiltImage -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CancelBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildTriggerRequest -from 
google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolOperationMetadata -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteBuildTriggerRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolOperationMetadata -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import FileHashes -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildTriggerRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitHubEventsConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitSource -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Hash -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import InlineSecret -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsResponse -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersResponse -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsResponse -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PrivatePoolV1Config -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PubsubConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PullRequestFilter -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PushFilter -from 
google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ReceiveTriggerWebhookRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ReceiveTriggerWebhookResponse -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RepositoryEventConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RepoSource -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Results -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RetryBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RunBuildTriggerRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Secret -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SecretManagerSecret -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Secrets -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Source -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SourceProvenance -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSource -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSourceManifest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import TimeSpan -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateBuildTriggerRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolOperationMetadata -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedMavenArtifact -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedNpmPackage -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedPythonPackage -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Volume -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import WebhookConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import 
WorkerPool - -__all__ = ('CloudBuildClient', - 'CloudBuildAsyncClient', - 'ApprovalConfig', - 'ApprovalResult', - 'ApproveBuildRequest', - 'ArtifactResult', - 'Artifacts', - 'Build', - 'BuildApproval', - 'BuildOperationMetadata', - 'BuildOptions', - 'BuildStep', - 'BuildTrigger', - 'BuiltImage', - 'CancelBuildRequest', - 'CreateBuildRequest', - 'CreateBuildTriggerRequest', - 'CreateWorkerPoolOperationMetadata', - 'CreateWorkerPoolRequest', - 'DeleteBuildTriggerRequest', - 'DeleteWorkerPoolOperationMetadata', - 'DeleteWorkerPoolRequest', - 'FileHashes', - 'GetBuildRequest', - 'GetBuildTriggerRequest', - 'GetWorkerPoolRequest', - 'GitHubEventsConfig', - 'GitSource', - 'Hash', - 'InlineSecret', - 'ListBuildsRequest', - 'ListBuildsResponse', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', - 'PrivatePoolV1Config', - 'PubsubConfig', - 'PullRequestFilter', - 'PushFilter', - 'ReceiveTriggerWebhookRequest', - 'ReceiveTriggerWebhookResponse', - 'RepositoryEventConfig', - 'RepoSource', - 'Results', - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'Secret', - 'SecretManagerSecret', - 'Secrets', - 'Source', - 'SourceProvenance', - 'StorageSource', - 'StorageSourceManifest', - 'TimeSpan', - 'UpdateBuildTriggerRequest', - 'UpdateWorkerPoolOperationMetadata', - 'UpdateWorkerPoolRequest', - 'UploadedMavenArtifact', - 'UploadedNpmPackage', - 'UploadedPythonPackage', - 'Volume', - 'WebhookConfig', - 'WorkerPool', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed deleted file mode 100644 index 6070c14c..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py deleted file mode 100644 index 9fcffdb4..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py +++ /dev/null @@ -1,152 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.cloud_build import CloudBuildClient -from .services.cloud_build import CloudBuildAsyncClient - -from .types.cloudbuild import ApprovalConfig -from .types.cloudbuild import ApprovalResult -from .types.cloudbuild import ApproveBuildRequest -from .types.cloudbuild import ArtifactResult -from .types.cloudbuild import Artifacts -from .types.cloudbuild import Build -from .types.cloudbuild import BuildApproval -from .types.cloudbuild import BuildOperationMetadata -from .types.cloudbuild import BuildOptions -from .types.cloudbuild import BuildStep -from .types.cloudbuild import BuildTrigger -from .types.cloudbuild import BuiltImage -from .types.cloudbuild import CancelBuildRequest -from .types.cloudbuild import CreateBuildRequest -from .types.cloudbuild import CreateBuildTriggerRequest -from .types.cloudbuild import CreateWorkerPoolOperationMetadata -from .types.cloudbuild import CreateWorkerPoolRequest -from .types.cloudbuild import DeleteBuildTriggerRequest -from .types.cloudbuild import DeleteWorkerPoolOperationMetadata -from .types.cloudbuild import DeleteWorkerPoolRequest -from .types.cloudbuild import FileHashes -from .types.cloudbuild import GetBuildRequest -from .types.cloudbuild import GetBuildTriggerRequest -from .types.cloudbuild import GetWorkerPoolRequest -from .types.cloudbuild import GitHubEventsConfig -from .types.cloudbuild import GitSource -from .types.cloudbuild import Hash -from .types.cloudbuild import InlineSecret -from .types.cloudbuild import ListBuildsRequest -from .types.cloudbuild import ListBuildsResponse -from .types.cloudbuild import ListBuildTriggersRequest -from .types.cloudbuild import ListBuildTriggersResponse -from .types.cloudbuild import ListWorkerPoolsRequest -from .types.cloudbuild import ListWorkerPoolsResponse -from .types.cloudbuild import PrivatePoolV1Config -from .types.cloudbuild 
import PubsubConfig -from .types.cloudbuild import PullRequestFilter -from .types.cloudbuild import PushFilter -from .types.cloudbuild import ReceiveTriggerWebhookRequest -from .types.cloudbuild import ReceiveTriggerWebhookResponse -from .types.cloudbuild import RepositoryEventConfig -from .types.cloudbuild import RepoSource -from .types.cloudbuild import Results -from .types.cloudbuild import RetryBuildRequest -from .types.cloudbuild import RunBuildTriggerRequest -from .types.cloudbuild import Secret -from .types.cloudbuild import SecretManagerSecret -from .types.cloudbuild import Secrets -from .types.cloudbuild import Source -from .types.cloudbuild import SourceProvenance -from .types.cloudbuild import StorageSource -from .types.cloudbuild import StorageSourceManifest -from .types.cloudbuild import TimeSpan -from .types.cloudbuild import UpdateBuildTriggerRequest -from .types.cloudbuild import UpdateWorkerPoolOperationMetadata -from .types.cloudbuild import UpdateWorkerPoolRequest -from .types.cloudbuild import UploadedMavenArtifact -from .types.cloudbuild import UploadedNpmPackage -from .types.cloudbuild import UploadedPythonPackage -from .types.cloudbuild import Volume -from .types.cloudbuild import WebhookConfig -from .types.cloudbuild import WorkerPool - -__all__ = ( - 'CloudBuildAsyncClient', -'ApprovalConfig', -'ApprovalResult', -'ApproveBuildRequest', -'ArtifactResult', -'Artifacts', -'Build', -'BuildApproval', -'BuildOperationMetadata', -'BuildOptions', -'BuildStep', -'BuildTrigger', -'BuiltImage', -'CancelBuildRequest', -'CloudBuildClient', -'CreateBuildRequest', -'CreateBuildTriggerRequest', -'CreateWorkerPoolOperationMetadata', -'CreateWorkerPoolRequest', -'DeleteBuildTriggerRequest', -'DeleteWorkerPoolOperationMetadata', -'DeleteWorkerPoolRequest', -'FileHashes', -'GetBuildRequest', -'GetBuildTriggerRequest', -'GetWorkerPoolRequest', -'GitHubEventsConfig', -'GitSource', -'Hash', -'InlineSecret', -'ListBuildTriggersRequest', 
-'ListBuildTriggersResponse', -'ListBuildsRequest', -'ListBuildsResponse', -'ListWorkerPoolsRequest', -'ListWorkerPoolsResponse', -'PrivatePoolV1Config', -'PubsubConfig', -'PullRequestFilter', -'PushFilter', -'ReceiveTriggerWebhookRequest', -'ReceiveTriggerWebhookResponse', -'RepoSource', -'RepositoryEventConfig', -'Results', -'RetryBuildRequest', -'RunBuildTriggerRequest', -'Secret', -'SecretManagerSecret', -'Secrets', -'Source', -'SourceProvenance', -'StorageSource', -'StorageSourceManifest', -'TimeSpan', -'UpdateBuildTriggerRequest', -'UpdateWorkerPoolOperationMetadata', -'UpdateWorkerPoolRequest', -'UploadedMavenArtifact', -'UploadedNpmPackage', -'UploadedPythonPackage', -'Volume', -'WebhookConfig', -'WorkerPool', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json deleted file mode 100644 index 2648fd24..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json +++ /dev/null @@ -1,298 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.devtools.cloudbuild_v1", - "protoPackage": "google.devtools.cloudbuild.v1", - "schema": "1.0", - "services": { - "CloudBuild": { - "clients": { - "grpc": { - "libraryClient": "CloudBuildClient", - "rpcs": { - "ApproveBuild": { - "methods": [ - "approve_build" - ] - }, - "CancelBuild": { - "methods": [ - "cancel_build" - ] - }, - "CreateBuild": { - "methods": [ - "create_build" - ] - }, - "CreateBuildTrigger": { - "methods": [ - "create_build_trigger" - ] - }, - "CreateWorkerPool": { - "methods": [ - "create_worker_pool" - ] - }, - "DeleteBuildTrigger": { - "methods": [ - "delete_build_trigger" - ] - }, - "DeleteWorkerPool": { - "methods": [ - "delete_worker_pool" - ] - }, - "GetBuild": { - "methods": [ - "get_build" - ] - }, - "GetBuildTrigger": { - "methods": [ - 
"get_build_trigger" - ] - }, - "GetWorkerPool": { - "methods": [ - "get_worker_pool" - ] - }, - "ListBuildTriggers": { - "methods": [ - "list_build_triggers" - ] - }, - "ListBuilds": { - "methods": [ - "list_builds" - ] - }, - "ListWorkerPools": { - "methods": [ - "list_worker_pools" - ] - }, - "ReceiveTriggerWebhook": { - "methods": [ - "receive_trigger_webhook" - ] - }, - "RetryBuild": { - "methods": [ - "retry_build" - ] - }, - "RunBuildTrigger": { - "methods": [ - "run_build_trigger" - ] - }, - "UpdateBuildTrigger": { - "methods": [ - "update_build_trigger" - ] - }, - "UpdateWorkerPool": { - "methods": [ - "update_worker_pool" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudBuildAsyncClient", - "rpcs": { - "ApproveBuild": { - "methods": [ - "approve_build" - ] - }, - "CancelBuild": { - "methods": [ - "cancel_build" - ] - }, - "CreateBuild": { - "methods": [ - "create_build" - ] - }, - "CreateBuildTrigger": { - "methods": [ - "create_build_trigger" - ] - }, - "CreateWorkerPool": { - "methods": [ - "create_worker_pool" - ] - }, - "DeleteBuildTrigger": { - "methods": [ - "delete_build_trigger" - ] - }, - "DeleteWorkerPool": { - "methods": [ - "delete_worker_pool" - ] - }, - "GetBuild": { - "methods": [ - "get_build" - ] - }, - "GetBuildTrigger": { - "methods": [ - "get_build_trigger" - ] - }, - "GetWorkerPool": { - "methods": [ - "get_worker_pool" - ] - }, - "ListBuildTriggers": { - "methods": [ - "list_build_triggers" - ] - }, - "ListBuilds": { - "methods": [ - "list_builds" - ] - }, - "ListWorkerPools": { - "methods": [ - "list_worker_pools" - ] - }, - "ReceiveTriggerWebhook": { - "methods": [ - "receive_trigger_webhook" - ] - }, - "RetryBuild": { - "methods": [ - "retry_build" - ] - }, - "RunBuildTrigger": { - "methods": [ - "run_build_trigger" - ] - }, - "UpdateBuildTrigger": { - "methods": [ - "update_build_trigger" - ] - }, - "UpdateWorkerPool": { - "methods": [ - "update_worker_pool" - ] - } - } - }, - "rest": { - "libraryClient": 
"CloudBuildClient", - "rpcs": { - "ApproveBuild": { - "methods": [ - "approve_build" - ] - }, - "CancelBuild": { - "methods": [ - "cancel_build" - ] - }, - "CreateBuild": { - "methods": [ - "create_build" - ] - }, - "CreateBuildTrigger": { - "methods": [ - "create_build_trigger" - ] - }, - "CreateWorkerPool": { - "methods": [ - "create_worker_pool" - ] - }, - "DeleteBuildTrigger": { - "methods": [ - "delete_build_trigger" - ] - }, - "DeleteWorkerPool": { - "methods": [ - "delete_worker_pool" - ] - }, - "GetBuild": { - "methods": [ - "get_build" - ] - }, - "GetBuildTrigger": { - "methods": [ - "get_build_trigger" - ] - }, - "GetWorkerPool": { - "methods": [ - "get_worker_pool" - ] - }, - "ListBuildTriggers": { - "methods": [ - "list_build_triggers" - ] - }, - "ListBuilds": { - "methods": [ - "list_builds" - ] - }, - "ListWorkerPools": { - "methods": [ - "list_worker_pools" - ] - }, - "ReceiveTriggerWebhook": { - "methods": [ - "receive_trigger_webhook" - ] - }, - "RetryBuild": { - "methods": [ - "retry_build" - ] - }, - "RunBuildTrigger": { - "methods": [ - "run_build_trigger" - ] - }, - "UpdateBuildTrigger": { - "methods": [ - "update_build_trigger" - ] - }, - "UpdateWorkerPool": { - "methods": [ - "update_worker_pool" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed deleted file mode 100644 index 6070c14c..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py deleted file mode 100644 index 89a37dc9..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py deleted file mode 100644 index b796e7cd..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import CloudBuildClient -from .async_client import CloudBuildAsyncClient - -__all__ = ( - 'CloudBuildClient', - 'CloudBuildAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py deleted file mode 100644 index 00eb8e04..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ /dev/null @@ -1,2601 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .client import CloudBuildClient - - -class CloudBuildAsyncClient: - """Creates and manages builds on Google Cloud Platform. 
- - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - """ - - _client: CloudBuildClient - - DEFAULT_ENDPOINT = CloudBuildClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CloudBuildClient.DEFAULT_MTLS_ENDPOINT - - build_path = staticmethod(CloudBuildClient.build_path) - parse_build_path = staticmethod(CloudBuildClient.parse_build_path) - build_trigger_path = staticmethod(CloudBuildClient.build_trigger_path) - parse_build_trigger_path = staticmethod(CloudBuildClient.parse_build_trigger_path) - crypto_key_path = staticmethod(CloudBuildClient.crypto_key_path) - parse_crypto_key_path = staticmethod(CloudBuildClient.parse_crypto_key_path) - network_path = staticmethod(CloudBuildClient.network_path) - parse_network_path = staticmethod(CloudBuildClient.parse_network_path) - repository_path = staticmethod(CloudBuildClient.repository_path) - parse_repository_path = staticmethod(CloudBuildClient.parse_repository_path) - secret_version_path = staticmethod(CloudBuildClient.secret_version_path) - parse_secret_version_path = staticmethod(CloudBuildClient.parse_secret_version_path) - service_account_path = staticmethod(CloudBuildClient.service_account_path) - parse_service_account_path = staticmethod(CloudBuildClient.parse_service_account_path) - subscription_path = staticmethod(CloudBuildClient.subscription_path) - parse_subscription_path = staticmethod(CloudBuildClient.parse_subscription_path) - topic_path = staticmethod(CloudBuildClient.topic_path) - parse_topic_path = staticmethod(CloudBuildClient.parse_topic_path) - worker_pool_path = staticmethod(CloudBuildClient.worker_pool_path) - parse_worker_pool_path = staticmethod(CloudBuildClient.parse_worker_pool_path) - common_billing_account_path = 
staticmethod(CloudBuildClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CloudBuildClient.parse_common_billing_account_path) - common_folder_path = staticmethod(CloudBuildClient.common_folder_path) - parse_common_folder_path = staticmethod(CloudBuildClient.parse_common_folder_path) - common_organization_path = staticmethod(CloudBuildClient.common_organization_path) - parse_common_organization_path = staticmethod(CloudBuildClient.parse_common_organization_path) - common_project_path = staticmethod(CloudBuildClient.common_project_path) - parse_common_project_path = staticmethod(CloudBuildClient.parse_common_project_path) - common_location_path = staticmethod(CloudBuildClient.common_location_path) - parse_common_location_path = staticmethod(CloudBuildClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildAsyncClient: The constructed client. - """ - return CloudBuildClient.from_service_account_info.__func__(CloudBuildAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildAsyncClient: The constructed client. 
- """ - return CloudBuildClient.from_service_account_file.__func__(CloudBuildAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return CloudBuildClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> CloudBuildTransport: - """Returns the transport used by the client instance. - - Returns: - CloudBuildTransport: The transport used by the client instance. 
- """ - return self._client.transport - - get_transport_class = functools.partial(type(CloudBuildClient).get_transport_class, type(CloudBuildClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudBuildTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud build client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.CloudBuildTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. 
- - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CloudBuildClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_build(self, - request: Optional[Union[cloudbuild.CreateBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - build: Optional[cloudbuild.Build] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts a build with the specified configuration. - - This method returns a long-running ``Operation``, which includes - the build ID. Pass the build ID to ``GetBuild`` to determine the - build status (such as ``SUCCESS`` or ``FAILURE``). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_create_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateBuildRequest( - project_id="project_id_value", - ) - - # Make the request - operation = client.create_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest, dict]]): - The request object. Request to create a new build. 
- project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - build (:class:`google.cloud.devtools.cloudbuild_v1.types.Build`): - Required. Build resource to create. - This corresponds to the ``build`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, build]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.CreateBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if build is not None: - request.build = build - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_build, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - async def get_build(self, - request: Optional[Union[cloudbuild.GetBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: - r"""Returns information about a previously requested build. - - The ``Build`` that is returned includes its status (such as - ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing - information. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_get_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = await client.get_build(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest, dict]]): - The request object. Request to get a build. - project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (:class:`str`): - Required. ID of the build. - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.Build: - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. 
- - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.GetBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_build, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("id", request.id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_builds(self, - request: Optional[Union[cloudbuild.ListBuildsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildsAsyncPager: - r"""Lists previously requested builds. - Previously requested builds may still be in-progress, or - may have finished successfully or unsuccessfully. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_list_builds(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildsRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_builds(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest, dict]]): - The request object. Request to list builds. - project_id (:class:`str`): - Required. ID of the project. 
- This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (:class:`str`): - The raw filter text to constrain the - results. - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsAsyncPager: - Response including listed builds. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, filter]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.ListBuildsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_builds, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBuildsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def cancel_build(self, - request: Optional[Union[cloudbuild.CancelBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: - r"""Cancels a build in progress. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_cancel_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CancelBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = await client.cancel_build(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest, dict]]): - The request object. Request to cancel an ongoing build. - project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (:class:`str`): - Required. ID of the build. - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.Build: - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. 
- - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.CancelBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_build, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("id", request.id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def retry_build(self, - request: Optional[Union[cloudbuild.RetryBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new build based on the specified build. - - This method creates a new build using the original build - request, which may or may not result in an identical build. - - For triggered builds: - - - Triggered builds resolve to a precise revision; therefore a - retry of a triggered build will result in a build that uses - the same revision. - - For non-triggered builds that specify ``RepoSource``: - - - If the original build built from the tip of a branch, the - retried build will build from the tip of that branch, which - may not be the same revision as the original build. - - If the original build specified a commit sha or revision ID, - the retried build will use the identical source. - - For builds that specify ``StorageSource``: - - - If the original build pulled source from Cloud Storage - without specifying the generation of the object, the new - build will use the current object, which may be different - from the original build source. - - If the original build pulled source from Cloud Storage and - specified the generation of the object, the new build will - attempt to use the same object, which may or may not be - available depending on the bucket's lifecycle management - settings. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_retry_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RetryBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - operation = client.retry_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest, dict]]): - The request object. Specifies a build to retry. - project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (:class:`str`): - Required. Build ID of the original - build. - - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. 
- - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.RetryBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.retry_build, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("id", request.id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - async def approve_build(self, - request: Optional[Union[cloudbuild.ApproveBuildRequest, dict]] = None, - *, - name: Optional[str] = None, - approval_result: Optional[cloudbuild.ApprovalResult] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. - - If rejected, the returned LRO will be immediately done. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_approve_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ApproveBuildRequest( - name="name_value", - ) - - # Make the request - operation = client.approve_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest, dict]]): - The request object. Request to approve or reject a - pending build. - name (:class:`str`): - Required. Name of the target build. 
For example: - "projects/{$project_id}/builds/{$build_id}" - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - approval_result (:class:`google.cloud.devtools.cloudbuild_v1.types.ApprovalResult`): - Approval decision and metadata. - This corresponds to the ``approval_result`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, approval_result]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.ApproveBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if approval_result is not None: - request.approval_result = approval_result - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.approve_build, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - async def create_build_trigger(self, - request: Optional[Union[cloudbuild.CreateBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger: Optional[cloudbuild.BuildTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Creates a new ``BuildTrigger``. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_create_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.CreateBuildTriggerRequest( - project_id="project_id_value", - trigger=trigger, - ) - - # Make the request - response = await client.create_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest, dict]]): - The request object. Request to create a new ``BuildTrigger``. - project_id (:class:`str`): - Required. ID of the project for which - to configure automatic builds. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger (:class:`google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`): - Required. ``BuildTrigger`` to create. - This corresponds to the ``trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.CreateBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger is not None: - request.trigger = trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_build_trigger, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_build_trigger(self, - request: Optional[Union[cloudbuild.GetBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Returns information about a ``BuildTrigger``. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_get_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - response = await client.get_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest, dict]]): - The request object. Returns the ``BuildTrigger`` with the specified ID. - project_id (:class:`str`): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (:class:`str`): - Required. Identifier (``id`` or ``name``) of the - ``BuildTrigger`` to get. - - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, trigger_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.GetBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_build_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_build_triggers(self, - request: Optional[Union[cloudbuild.ListBuildTriggersRequest, dict]] = None, - *, - project_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildTriggersAsyncPager: - r"""Lists existing ``BuildTrigger``\ s. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_list_build_triggers(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildTriggersRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_build_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest, dict]]): - The request object. Request to list existing ``BuildTriggers``. - project_id (:class:`str`): - Required. ID of the project for which - to list BuildTriggers. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersAsyncPager: - Response containing existing BuildTriggers. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.ListBuildTriggersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_build_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBuildTriggersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_build_trigger(self, - request: Optional[Union[cloudbuild.DeleteBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_delete_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - await client.delete_build_trigger(request=request) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest, dict]]): - The request object. Request to delete a ``BuildTrigger``. - project_id (:class:`str`): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (:class:`str`): - Required. ID of the ``BuildTrigger`` to delete. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, trigger_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.DeleteBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_build_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_build_trigger(self, - request: Optional[Union[cloudbuild.UpdateBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - trigger: Optional[cloudbuild.BuildTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_update_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.UpdateBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=trigger, - ) - - # Make the request - response = await client.update_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest, dict]]): - The request object. Request to update an existing ``BuildTrigger``. - project_id (:class:`str`): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (:class:`str`): - Required. ID of the ``BuildTrigger`` to update. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger (:class:`google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`): - Required. ``BuildTrigger`` to update. - This corresponds to the ``trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id, trigger]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.UpdateBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if trigger is not None: - request.trigger = trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_build_trigger, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def run_build_trigger(self, - request: Optional[Union[cloudbuild.RunBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - source: Optional[cloudbuild.RepoSource] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Runs a ``BuildTrigger`` at a particular source revision. - - To run a regional or global trigger, use the POST request that - includes the location endpoint in the path (ex. - v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). - The POST request that does not include the location endpoint in - the path can only be used when running global triggers. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_run_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RunBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - operation = client.run_build_trigger(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest, dict]]): - The request object. Specifies a build trigger to run and - the source to use. 
- project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (:class:`str`): - Required. ID of the trigger. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source (:class:`google.cloud.devtools.cloudbuild_v1.types.RepoSource`): - Source to build against this trigger. - Branch and tag names cannot consist of - regular expressions. - - This corresponds to the ``source`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. 
- - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id, source]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.RunBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if source is not None: - request.source = source - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_build_trigger, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. 
- return response - - async def receive_trigger_webhook(self, - request: Optional[Union[cloudbuild.ReceiveTriggerWebhookRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.ReceiveTriggerWebhookResponse: - r"""ReceiveTriggerWebhook [Experimental] is called when the API - receives a webhook request targeted at a specific trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_receive_trigger_webhook(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ReceiveTriggerWebhookRequest( - ) - - # Make the request - response = await client.receive_trigger_webhook(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest, dict]]): - The request object. ReceiveTriggerWebhookRequest [Experimental] is the - request object accepted by the ReceiveTriggerWebhook - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse: - ReceiveTriggerWebhookResponse [Experimental] is the response object for the - ReceiveTriggerWebhook method. - - """ - # Create or coerce a protobuf request object. - request = cloudbuild.ReceiveTriggerWebhookRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.receive_trigger_webhook, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger", request.trigger), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_worker_pool(self, - request: Optional[Union[cloudbuild.CreateWorkerPoolRequest, dict]] = None, - *, - parent: Optional[str] = None, - worker_pool: Optional[cloudbuild.WorkerPool] = None, - worker_pool_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_create_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateWorkerPoolRequest( - parent="parent_value", - worker_pool_id="worker_pool_id_value", - ) - - # Make the request - operation = client.create_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest, dict]]): - The request object. Request to create a new ``WorkerPool``. - parent (:class:`str`): - Required. The parent resource where this worker pool - will be created. Format: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - worker_pool (:class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool`): - Required. ``WorkerPool`` resource to create. - This corresponds to the ``worker_pool`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - worker_pool_id (:class:`str`): - Required. Immutable. The ID to use for the - ``WorkerPool``, which will become the final component of - the resource name. - - This value should be 1-63 characters, and valid - characters are /[a-z][0-9]-/. - - This corresponds to the ``worker_pool_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, worker_pool, worker_pool_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.CreateWorkerPoolRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if worker_pool is not None: - request.worker_pool = worker_pool - if worker_pool_id is not None: - request.worker_pool_id = worker_pool_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_worker_pool, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.WorkerPool, - metadata_type=cloudbuild.CreateWorkerPoolOperationMetadata, - ) - - # Done; return the response. - return response - - async def get_worker_pool(self, - request: Optional[Union[cloudbuild.GetWorkerPoolRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: - r"""Returns details of a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_get_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetWorkerPoolRequest( - name="name_value", - ) - - # Make the request - response = await client.get_worker_pool(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest, dict]]): - The request object. Request to get a ``WorkerPool`` with the specified name. - name (:class:`str`): - Required. The name of the ``WorkerPool`` to retrieve. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.WorkerPool: - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. 
For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.GetWorkerPoolRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_worker_pool, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_worker_pool(self, - request: Optional[Union[cloudbuild.DeleteWorkerPoolRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a ``WorkerPool``. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_delete_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteWorkerPoolRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest, dict]]): - The request object. Request to delete a ``WorkerPool``. - name (:class:`str`): - Required. The name of the ``WorkerPool`` to delete. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. 
A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.DeleteWorkerPoolRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_worker_pool, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.DeleteWorkerPoolOperationMetadata, - ) - - # Done; return the response. 
- return response - - async def update_worker_pool(self, - request: Optional[Union[cloudbuild.UpdateWorkerPoolRequest, dict]] = None, - *, - worker_pool: Optional[cloudbuild.WorkerPool] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_update_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.UpdateWorkerPoolRequest( - ) - - # Make the request - operation = client.update_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest, dict]]): - The request object. Request to update a ``WorkerPool``. - worker_pool (:class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool`): - Required. The ``WorkerPool`` to update. - - The ``name`` field is used to identify the - ``WorkerPool`` to update. Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``worker_pool`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - A mask specifying which fields in ``worker_pool`` to - update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([worker_pool, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.UpdateWorkerPoolRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if worker_pool is not None: - request.worker_pool = worker_pool - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_worker_pool, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("worker_pool.name", request.worker_pool.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.WorkerPool, - metadata_type=cloudbuild.UpdateWorkerPoolOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_worker_pools(self, - request: Optional[Union[cloudbuild.ListWorkerPoolsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListWorkerPoolsAsyncPager: - r"""Lists ``WorkerPool``\ s. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_list_worker_pools(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListWorkerPoolsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_worker_pools(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest, dict]]): - The request object. Request to list ``WorkerPool``\ s. - parent (:class:`str`): - Required. The parent of the collection of - ``WorkerPools``. Format: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsAsyncPager: - Response containing existing WorkerPools. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.ListWorkerPoolsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_worker_pools, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListWorkerPoolsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "CloudBuildAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "CloudBuildAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py deleted file mode 100644 index 23c5a8ec..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ /dev/null @@ -1,2899 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import CloudBuildGrpcTransport -from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .transports.rest import CloudBuildRestTransport - - -class CloudBuildClientMeta(type): - """Metaclass for the CloudBuild client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] - _transport_registry["grpc"] = CloudBuildGrpcTransport - _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport - _transport_registry["rest"] = CloudBuildRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CloudBuildTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CloudBuildClient(metaclass=CloudBuildClientMeta): - """Creates and manages builds on Google Cloud Platform. - - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudBuildTransport: - """Returns the transport used by the client instance. - - Returns: - CloudBuildTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def build_path(project: str,build: str,) -> str: - """Returns a fully-qualified build string.""" - return "projects/{project}/builds/{build}".format(project=project, build=build, ) - - @staticmethod - def parse_build_path(path: str) -> Dict[str,str]: - """Parses a build path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/builds/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def build_trigger_path(project: str,location: str,trigger: str,) -> str: - """Returns a fully-qualified build_trigger string.""" - return "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) - - @staticmethod - def parse_build_trigger_path(path: str) -> Dict[str,str]: - """Parses a build_trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/triggers/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def crypto_key_path(project: str,location: str,keyring: str,key: str,) -> str: - """Returns a fully-qualified crypto_key string.""" - return "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format(project=project, location=location, keyring=keyring, key=key, ) - - @staticmethod - def parse_crypto_key_path(path: str) -> Dict[str,str]: - """Parses a crypto_key path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def network_path(project: str,network: str,) -> str: - """Returns a fully-qualified network string.""" - return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) - - @staticmethod - def parse_network_path(path: str) -> Dict[str,str]: - """Parses a network path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) - 
return m.groupdict() if m else {} - - @staticmethod - def repository_path(project: str,location: str,connection: str,repository: str,) -> str: - """Returns a fully-qualified repository string.""" - return "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) - - @staticmethod - def parse_repository_path(path: str) -> Dict[str,str]: - """Parses a repository path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)/repositories/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def secret_version_path(project: str,secret: str,version: str,) -> str: - """Returns a fully-qualified secret_version string.""" - return "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) - - @staticmethod - def parse_secret_version_path(path: str) -> Dict[str,str]: - """Parses a secret_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def service_account_path(project: str,service_account: str,) -> str: - """Returns a fully-qualified service_account string.""" - return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) - - @staticmethod - def parse_service_account_path(path: str) -> Dict[str,str]: - """Parses a service_account path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def subscription_path(project: str,subscription: str,) -> str: - """Returns a fully-qualified subscription string.""" - return "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) - - @staticmethod - def 
parse_subscription_path(path: str) -> Dict[str,str]: - """Parses a subscription path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def topic_path(project: str,topic: str,) -> str: - """Returns a fully-qualified topic string.""" - return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) - - @staticmethod - def parse_topic_path(path: str) -> Dict[str,str]: - """Parses a topic path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def worker_pool_path(project: str,location: str,worker_pool: str,) -> str: - """Returns a fully-qualified worker_pool string.""" - return "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(project=project, location=location, worker_pool=worker_pool, ) - - @staticmethod - def parse_worker_pool_path(path: str) -> Dict[str,str]: - """Parses a worker_pool path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/workerPools/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = 
re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. 
- - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudBuildTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud build client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, CloudBuildTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudBuildTransport): - # transport is a CloudBuildTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_build(self, - request: Optional[Union[cloudbuild.CreateBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - build: Optional[cloudbuild.Build] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Starts a build with the specified configuration. - - This method returns a long-running ``Operation``, which includes - the build ID. Pass the build ID to ``GetBuild`` to determine the - build status (such as ``SUCCESS`` or ``FAILURE``). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_create_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateBuildRequest( - project_id="project_id_value", - ) - - # Make the request - operation = client.create_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest, dict]): - The request object. Request to create a new build. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - build (google.cloud.devtools.cloudbuild_v1.types.Build): - Required. Build resource to create. - This corresponds to the ``build`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. 
- - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, build]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.CreateBuildRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.CreateBuildRequest): - request = cloudbuild.CreateBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if build is not None: - request.build = build - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.create_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - def get_build(self, - request: Optional[Union[cloudbuild.GetBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: - r"""Returns information about a previously requested build. - - The ``Build`` that is returned includes its status (such as - ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing - information. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_get_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = client.get_build(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest, dict]): - The request object. Request to get a build. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (str): - Required. ID of the build. - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.Build: - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. 
- - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.GetBuildRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.GetBuildRequest): - request = cloudbuild.GetBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_builds(self, - request: Optional[Union[cloudbuild.ListBuildsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildsPager: - r"""Lists previously requested builds. - Previously requested builds may still be in-progress, or - may have finished successfully or unsuccessfully. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_list_builds(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildsRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_builds(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest, dict]): - The request object. Request to list builds. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - The raw filter text to constrain the - results. - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsPager: - Response including listed builds. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, filter]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ListBuildsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ListBuildsRequest): - request = cloudbuild.ListBuildsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_builds] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBuildsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def cancel_build(self, - request: Optional[Union[cloudbuild.CancelBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: - r"""Cancels a build in progress. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_cancel_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CancelBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = client.cancel_build(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest, dict]): - The request object. Request to cancel an ongoing build. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (str): - Required. ID of the build. 
- This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.Build: - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.CancelBuildRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, cloudbuild.CancelBuildRequest): - request = cloudbuild.CancelBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def retry_build(self, - request: Optional[Union[cloudbuild.RetryBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new build based on the specified build. - - This method creates a new build using the original build - request, which may or may not result in an identical build. - - For triggered builds: - - - Triggered builds resolve to a precise revision; therefore a - retry of a triggered build will result in a build that uses - the same revision. - - For non-triggered builds that specify ``RepoSource``: - - - If the original build built from the tip of a branch, the - retried build will build from the tip of that branch, which - may not be the same revision as the original build. 
- - If the original build specified a commit sha or revision ID, - the retried build will use the identical source. - - For builds that specify ``StorageSource``: - - - If the original build pulled source from Cloud Storage - without specifying the generation of the object, the new - build will use the current object, which may be different - from the original build source. - - If the original build pulled source from Cloud Storage and - specified the generation of the object, the new build will - attempt to use the same object, which may or may not be - available depending on the bucket's lifecycle management - settings. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_retry_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RetryBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - operation = client.retry_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest, dict]): - The request object. Specifies a build to retry. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (str): - Required. Build ID of the original - build. 
- - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.RetryBuildRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.RetryBuildRequest): - request = cloudbuild.RetryBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.retry_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - def approve_build(self, - request: Optional[Union[cloudbuild.ApproveBuildRequest, dict]] = None, - *, - name: Optional[str] = None, - approval_result: Optional[cloudbuild.ApprovalResult] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. - - If rejected, the returned LRO will be immediately done. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_approve_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ApproveBuildRequest( - name="name_value", - ) - - # Make the request - operation = client.approve_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest, dict]): - The request object. Request to approve or reject a - pending build. - name (str): - Required. Name of the target build. For example: - "projects/{$project_id}/builds/{$build_id}" - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - approval_result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): - Approval decision and metadata. - This corresponds to the ``approval_result`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, approval_result]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ApproveBuildRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ApproveBuildRequest): - request = cloudbuild.ApproveBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if approval_result is not None: - request.approval_result = approval_result - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.approve_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - def create_build_trigger(self, - request: Optional[Union[cloudbuild.CreateBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger: Optional[cloudbuild.BuildTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Creates a new ``BuildTrigger``. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_create_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.CreateBuildTriggerRequest( - project_id="project_id_value", - trigger=trigger, - ) - - # Make the request - response = client.create_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest, dict]): - The request object. Request to create a new ``BuildTrigger``. - project_id (str): - Required. ID of the project for which - to configure automatic builds. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): - Required. ``BuildTrigger`` to create. - This corresponds to the ``trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, trigger]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.CreateBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.CreateBuildTriggerRequest): - request = cloudbuild.CreateBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger is not None: - request.trigger = trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_build_trigger(self, - request: Optional[Union[cloudbuild.GetBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Returns information about a ``BuildTrigger``. - - This API is experimental. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_get_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - response = client.get_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest, dict]): - The request object. Returns the ``BuildTrigger`` with the specified ID. - project_id (str): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (str): - Required. Identifier (``id`` or ``name``) of the - ``BuildTrigger`` to get. - - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.GetBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.GetBuildTriggerRequest): - request = cloudbuild.GetBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_build_triggers(self, - request: Optional[Union[cloudbuild.ListBuildTriggersRequest, dict]] = None, - *, - project_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildTriggersPager: - r"""Lists existing ``BuildTrigger``\ s. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_list_build_triggers(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildTriggersRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_build_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest, dict]): - The request object. Request to list existing ``BuildTriggers``. - project_id (str): - Required. ID of the project for which - to list BuildTriggers. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersPager: - Response containing existing BuildTriggers. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ListBuildTriggersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ListBuildTriggersRequest): - request = cloudbuild.ListBuildTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListBuildTriggersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_build_trigger(self, - request: Optional[Union[cloudbuild.DeleteBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_delete_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - client.delete_build_trigger(request=request) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest, dict]): - The request object. Request to delete a ``BuildTrigger``. - project_id (str): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (str): - Required. ID of the ``BuildTrigger`` to delete. 
- This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.DeleteBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.DeleteBuildTriggerRequest): - request = cloudbuild.DeleteBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_build_trigger(self, - request: Optional[Union[cloudbuild.UpdateBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - trigger: Optional[cloudbuild.BuildTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_update_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.UpdateBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=trigger, - ) - - # Make the request - response = client.update_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest, dict]): - The request object. Request to update an existing ``BuildTrigger``. - project_id (str): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (str): - Required. 
ID of the ``BuildTrigger`` to update. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): - Required. ``BuildTrigger`` to update. - This corresponds to the ``trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id, trigger]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.UpdateBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.UpdateBuildTriggerRequest): - request = cloudbuild.UpdateBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if trigger is not None: - request.trigger = trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') - regex_match = routing_param_regex.match(request.trigger.resource_name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_build_trigger(self, - request: Optional[Union[cloudbuild.RunBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - source: Optional[cloudbuild.RepoSource] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Runs a ``BuildTrigger`` at a particular source revision. - - To run a regional or global trigger, use the POST request that - includes the location endpoint in the path (ex. - v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). - The POST request that does not include the location endpoint in - the path can only be used when running global triggers. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_run_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RunBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - operation = client.run_build_trigger(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest, dict]): - The request object. Specifies a build trigger to run and - the source to use. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (str): - Required. ID of the trigger. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - Source to build against this trigger. - Branch and tag names cannot consist of - regular expressions. - - This corresponds to the ``source`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id, source]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.RunBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.RunBuildTriggerRequest): - request = cloudbuild.RunBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if source is not None: - request.source = source - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - def receive_trigger_webhook(self, - request: Optional[Union[cloudbuild.ReceiveTriggerWebhookRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.ReceiveTriggerWebhookResponse: - r"""ReceiveTriggerWebhook [Experimental] is called when the API - receives a webhook request targeted at a specific trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_receive_trigger_webhook(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ReceiveTriggerWebhookRequest( - ) - - # Make the request - response = client.receive_trigger_webhook(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest, dict]): - The request object. ReceiveTriggerWebhookRequest [Experimental] is the - request object accepted by the ReceiveTriggerWebhook - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse: - ReceiveTriggerWebhookResponse [Experimental] is the response object for the - ReceiveTriggerWebhook method. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ReceiveTriggerWebhookRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ReceiveTriggerWebhookRequest): - request = cloudbuild.ReceiveTriggerWebhookRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.receive_trigger_webhook] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger", request.trigger), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_worker_pool(self, - request: Optional[Union[cloudbuild.CreateWorkerPoolRequest, dict]] = None, - *, - parent: Optional[str] = None, - worker_pool: Optional[cloudbuild.WorkerPool] = None, - worker_pool_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_create_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateWorkerPoolRequest( - parent="parent_value", - worker_pool_id="worker_pool_id_value", - ) - - # Make the request - operation = client.create_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest, dict]): - The request object. Request to create a new ``WorkerPool``. - parent (str): - Required. The parent resource where this worker pool - will be created. 
Format: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): - Required. ``WorkerPool`` resource to create. - This corresponds to the ``worker_pool`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - worker_pool_id (str): - Required. Immutable. The ID to use for the - ``WorkerPool``, which will become the final component of - the resource name. - - This value should be 1-63 characters, and valid - characters are /[a-z][0-9]-/. - - This corresponds to the ``worker_pool_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, worker_pool, worker_pool_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.CreateWorkerPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.CreateWorkerPoolRequest): - request = cloudbuild.CreateWorkerPoolRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if worker_pool is not None: - request.worker_pool = worker_pool - if worker_pool_id is not None: - request.worker_pool_id = worker_pool_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.WorkerPool, - metadata_type=cloudbuild.CreateWorkerPoolOperationMetadata, - ) - - # Done; return the response. 
- return response - - def get_worker_pool(self, - request: Optional[Union[cloudbuild.GetWorkerPoolRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: - r"""Returns details of a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_get_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetWorkerPoolRequest( - name="name_value", - ) - - # Make the request - response = client.get_worker_pool(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest, dict]): - The request object. Request to get a ``WorkerPool`` with the specified name. - name (str): - Required. The name of the ``WorkerPool`` to retrieve. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.WorkerPool: - Configuration for a WorkerPool. 
- - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.GetWorkerPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.GetWorkerPoolRequest): - request = cloudbuild.GetWorkerPoolRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_worker_pool(self, - request: Optional[Union[cloudbuild.DeleteWorkerPoolRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_delete_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteWorkerPoolRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest, dict]): - The request object. Request to delete a ``WorkerPool``. - name (str): - Required. The name of the ``WorkerPool`` to delete. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.DeleteWorkerPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.DeleteWorkerPoolRequest): - request = cloudbuild.DeleteWorkerPoolRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_worker_pool] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.DeleteWorkerPoolOperationMetadata, - ) - - # Done; return the response. 
- return response - - def update_worker_pool(self, - request: Optional[Union[cloudbuild.UpdateWorkerPoolRequest, dict]] = None, - *, - worker_pool: Optional[cloudbuild.WorkerPool] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_update_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.UpdateWorkerPoolRequest( - ) - - # Make the request - operation = client.update_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest, dict]): - The request object. Request to update a ``WorkerPool``. - worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): - Required. The ``WorkerPool`` to update. - - The ``name`` field is used to identify the - ``WorkerPool`` to update. Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``worker_pool`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask specifying which fields in ``worker_pool`` to - update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([worker_pool, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.UpdateWorkerPoolRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.UpdateWorkerPoolRequest): - request = cloudbuild.UpdateWorkerPoolRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if worker_pool is not None: - request.worker_pool = worker_pool - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$') - regex_match = routing_param_regex.match(request.worker_pool.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.WorkerPool, - metadata_type=cloudbuild.UpdateWorkerPoolOperationMetadata, - ) - - # Done; return the response. - return response - - def list_worker_pools(self, - request: Optional[Union[cloudbuild.ListWorkerPoolsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListWorkerPoolsPager: - r"""Lists ``WorkerPool``\ s. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_list_worker_pools(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListWorkerPoolsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_worker_pools(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest, dict]): - The request object. Request to list ``WorkerPool``\ s. - parent (str): - Required. The parent of the collection of - ``WorkerPools``. Format: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsPager: - Response containing existing WorkerPools. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ListWorkerPoolsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ListWorkerPoolsRequest): - request = cloudbuild.ListWorkerPoolsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListWorkerPoolsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "CloudBuildClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "CloudBuildClient", -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py deleted file mode 100644 index f255025e..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py +++ /dev/null @@ -1,381 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild - - -class ListBuildsPager: - """A pager for iterating through ``list_builds`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``builds`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBuilds`` requests and continue to iterate - through the ``builds`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudbuild.ListBuildsResponse], - request: cloudbuild.ListBuildsRequest, - response: cloudbuild.ListBuildsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListBuildsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cloudbuild.ListBuildsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cloudbuild.Build]: - for page in self.pages: - yield from page.builds - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBuildsAsyncPager: - """A pager for iterating through ``list_builds`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``builds`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBuilds`` requests and continue to iterate - through the ``builds`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudbuild.ListBuildsResponse]], - request: cloudbuild.ListBuildsRequest, - response: cloudbuild.ListBuildsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListBuildsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cloudbuild.ListBuildsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[cloudbuild.Build]: - async def async_generator(): - async for page in self.pages: - for response in page.builds: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBuildTriggersPager: - """A pager for iterating through ``list_build_triggers`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``triggers`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBuildTriggers`` requests and continue to iterate - through the ``triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudbuild.ListBuildTriggersResponse], - request: cloudbuild.ListBuildTriggersRequest, - response: cloudbuild.ListBuildTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = cloudbuild.ListBuildTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cloudbuild.ListBuildTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cloudbuild.BuildTrigger]: - for page in self.pages: - yield from page.triggers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBuildTriggersAsyncPager: - """A pager for iterating through ``list_build_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``triggers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBuildTriggers`` requests and continue to iterate - through the ``triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudbuild.ListBuildTriggersResponse]], - request: cloudbuild.ListBuildTriggersRequest, - response: cloudbuild.ListBuildTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListBuildTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cloudbuild.ListBuildTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[cloudbuild.BuildTrigger]: - async def async_generator(): - async for page in self.pages: - for response in page.triggers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListWorkerPoolsPager: - """A pager for iterating through ``list_worker_pools`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``worker_pools`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListWorkerPools`` requests and continue to iterate - through the ``worker_pools`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., cloudbuild.ListWorkerPoolsResponse], - request: cloudbuild.ListWorkerPoolsRequest, - response: cloudbuild.ListWorkerPoolsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListWorkerPoolsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cloudbuild.ListWorkerPoolsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cloudbuild.WorkerPool]: - for page in self.pages: - yield from page.worker_pools - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListWorkerPoolsAsyncPager: - """A pager for iterating through ``list_worker_pools`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``worker_pools`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListWorkerPools`` requests and continue to iterate - through the ``worker_pools`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudbuild.ListWorkerPoolsResponse]], - request: cloudbuild.ListWorkerPoolsRequest, - response: cloudbuild.ListWorkerPoolsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListWorkerPoolsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cloudbuild.ListWorkerPoolsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[cloudbuild.WorkerPool]: - async def async_generator(): - async for page in self.pages: - for response in page.worker_pools: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py 
b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py deleted file mode 100644 index d7a78973..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CloudBuildTransport -from .grpc import CloudBuildGrpcTransport -from .grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .rest import CloudBuildRestTransport -from .rest import CloudBuildRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] -_transport_registry['grpc'] = CloudBuildGrpcTransport -_transport_registry['grpc_asyncio'] = CloudBuildGrpcAsyncIOTransport -_transport_registry['rest'] = CloudBuildRestTransport - -__all__ = ( - 'CloudBuildTransport', - 'CloudBuildGrpcTransport', - 'CloudBuildGrpcAsyncIOTransport', - 'CloudBuildRestTransport', - 'CloudBuildRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py deleted file mode 100644 index cf5f61cc..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py +++ /dev/null @@ -1,443 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class CloudBuildTransport(abc.ABC): - """Abstract transport class for CloudBuild.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudbuild.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_build: gapic_v1.method.wrap_method( - self.create_build, - default_timeout=600.0, - client_info=client_info, - ), - self.get_build: gapic_v1.method.wrap_method( - self.get_build, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.list_builds: gapic_v1.method.wrap_method( - self.list_builds, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.cancel_build: gapic_v1.method.wrap_method( - self.cancel_build, - default_timeout=600.0, - client_info=client_info, - ), - self.retry_build: gapic_v1.method.wrap_method( - self.retry_build, - default_timeout=600.0, - client_info=client_info, - ), - self.approve_build: gapic_v1.method.wrap_method( - self.approve_build, - default_timeout=None, - client_info=client_info, - ), - self.create_build_trigger: gapic_v1.method.wrap_method( - self.create_build_trigger, - default_timeout=600.0, - client_info=client_info, - ), - self.get_build_trigger: gapic_v1.method.wrap_method( - self.get_build_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.list_build_triggers: gapic_v1.method.wrap_method( - self.list_build_triggers, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.delete_build_trigger: gapic_v1.method.wrap_method( - self.delete_build_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.update_build_trigger: gapic_v1.method.wrap_method( - self.update_build_trigger, - default_timeout=600.0, - client_info=client_info, - ), - self.run_build_trigger: gapic_v1.method.wrap_method( - self.run_build_trigger, - default_timeout=600.0, - client_info=client_info, - ), - self.receive_trigger_webhook: gapic_v1.method.wrap_method( - self.receive_trigger_webhook, - default_timeout=None, - client_info=client_info, - ), - self.create_worker_pool: gapic_v1.method.wrap_method( - self.create_worker_pool, - default_timeout=600.0, - client_info=client_info, - ), - self.get_worker_pool: gapic_v1.method.wrap_method( - self.get_worker_pool, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.delete_worker_pool: gapic_v1.method.wrap_method( - self.delete_worker_pool, - default_timeout=600.0, - client_info=client_info, - ), - self.update_worker_pool: gapic_v1.method.wrap_method( - self.update_worker_pool, - default_timeout=600.0, - client_info=client_info, - ), - self.list_worker_pools: gapic_v1.method.wrap_method( - self.list_worker_pools, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - 
core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - Union[ - cloudbuild.Build, - Awaitable[cloudbuild.Build] - ]]: - raise NotImplementedError() - - @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - Union[ - cloudbuild.ListBuildsResponse, - Awaitable[cloudbuild.ListBuildsResponse] - ]]: - raise NotImplementedError() - - @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - Union[ - cloudbuild.Build, - Awaitable[cloudbuild.Build] - ]]: - raise NotImplementedError() - - @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def approve_build(self) -> Callable[ - [cloudbuild.ApproveBuildRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - Union[ - cloudbuild.BuildTrigger, - Awaitable[cloudbuild.BuildTrigger] - ]]: - raise NotImplementedError() - - @property - def get_build_trigger(self) -> Callable[ - 
[cloudbuild.GetBuildTriggerRequest], - Union[ - cloudbuild.BuildTrigger, - Awaitable[cloudbuild.BuildTrigger] - ]]: - raise NotImplementedError() - - @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - Union[ - cloudbuild.ListBuildTriggersResponse, - Awaitable[cloudbuild.ListBuildTriggersResponse] - ]]: - raise NotImplementedError() - - @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - Union[ - cloudbuild.BuildTrigger, - Awaitable[cloudbuild.BuildTrigger] - ]]: - raise NotImplementedError() - - @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def receive_trigger_webhook(self) -> Callable[ - [cloudbuild.ReceiveTriggerWebhookRequest], - Union[ - cloudbuild.ReceiveTriggerWebhookResponse, - Awaitable[cloudbuild.ReceiveTriggerWebhookResponse] - ]]: - raise NotImplementedError() - - @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - Union[ - cloudbuild.WorkerPool, - Awaitable[cloudbuild.WorkerPool] - ]]: - raise NotImplementedError() - - @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - Union[ - operations_pb2.Operation, - 
Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - Union[ - cloudbuild.ListWorkerPoolsResponse, - Awaitable[cloudbuild.ListWorkerPoolsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'CloudBuildTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py deleted file mode 100644 index 08b246b2..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ /dev/null @@ -1,793 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO - - -class CloudBuildGrpcTransport(CloudBuildTransport): - """gRPC backend transport for CloudBuild. - - Creates and manages builds on Google Cloud Platform. - - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. 
- self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. 
- return self._operations_client - - @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - operations_pb2.Operation]: - r"""Return a callable for the create build method over gRPC. - - Starts a build with the specified configuration. - - This method returns a long-running ``Operation``, which includes - the build ID. Pass the build ID to ``GetBuild`` to determine the - build status (such as ``SUCCESS`` or ``FAILURE``). - - Returns: - Callable[[~.CreateBuildRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_build' not in self._stubs: - self._stubs['create_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', - request_serializer=cloudbuild.CreateBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_build'] - - @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - cloudbuild.Build]: - r"""Return a callable for the get build method over gRPC. - - Returns information about a previously requested build. - - The ``Build`` that is returned includes its status (such as - ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing - information. - - Returns: - Callable[[~.GetBuildRequest], - ~.Build]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_build' not in self._stubs: - self._stubs['get_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', - request_serializer=cloudbuild.GetBuildRequest.serialize, - response_deserializer=cloudbuild.Build.deserialize, - ) - return self._stubs['get_build'] - - @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - cloudbuild.ListBuildsResponse]: - r"""Return a callable for the list builds method over gRPC. - - Lists previously requested builds. - Previously requested builds may still be in-progress, or - may have finished successfully or unsuccessfully. - - Returns: - Callable[[~.ListBuildsRequest], - ~.ListBuildsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_builds' not in self._stubs: - self._stubs['list_builds'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', - request_serializer=cloudbuild.ListBuildsRequest.serialize, - response_deserializer=cloudbuild.ListBuildsResponse.deserialize, - ) - return self._stubs['list_builds'] - - @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - cloudbuild.Build]: - r"""Return a callable for the cancel build method over gRPC. - - Cancels a build in progress. - - Returns: - Callable[[~.CancelBuildRequest], - ~.Build]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'cancel_build' not in self._stubs: - self._stubs['cancel_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', - request_serializer=cloudbuild.CancelBuildRequest.serialize, - response_deserializer=cloudbuild.Build.deserialize, - ) - return self._stubs['cancel_build'] - - @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - operations_pb2.Operation]: - r"""Return a callable for the retry build method over gRPC. - - Creates a new build based on the specified build. - - This method creates a new build using the original build - request, which may or may not result in an identical build. - - For triggered builds: - - - Triggered builds resolve to a precise revision; therefore a - retry of a triggered build will result in a build that uses - the same revision. - - For non-triggered builds that specify ``RepoSource``: - - - If the original build built from the tip of a branch, the - retried build will build from the tip of that branch, which - may not be the same revision as the original build. - - If the original build specified a commit sha or revision ID, - the retried build will use the identical source. - - For builds that specify ``StorageSource``: - - - If the original build pulled source from Cloud Storage - without specifying the generation of the object, the new - build will use the current object, which may be different - from the original build source. - - If the original build pulled source from Cloud Storage and - specified the generation of the object, the new build will - attempt to use the same object, which may or may not be - available depending on the bucket's lifecycle management - settings. - - Returns: - Callable[[~.RetryBuildRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'retry_build' not in self._stubs: - self._stubs['retry_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', - request_serializer=cloudbuild.RetryBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['retry_build'] - - @property - def approve_build(self) -> Callable[ - [cloudbuild.ApproveBuildRequest], - operations_pb2.Operation]: - r"""Return a callable for the approve build method over gRPC. - - Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. - - If rejected, the returned LRO will be immediately done. - - Returns: - Callable[[~.ApproveBuildRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'approve_build' not in self._stubs: - self._stubs['approve_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ApproveBuild', - request_serializer=cloudbuild.ApproveBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['approve_build'] - - @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - cloudbuild.BuildTrigger]: - r"""Return a callable for the create build trigger method over gRPC. - - Creates a new ``BuildTrigger``. - - This API is experimental. - - Returns: - Callable[[~.CreateBuildTriggerRequest], - ~.BuildTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_build_trigger' not in self._stubs: - self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', - request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['create_build_trigger'] - - @property - def get_build_trigger(self) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], - cloudbuild.BuildTrigger]: - r"""Return a callable for the get build trigger method over gRPC. - - Returns information about a ``BuildTrigger``. - - This API is experimental. - - Returns: - Callable[[~.GetBuildTriggerRequest], - ~.BuildTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_build_trigger' not in self._stubs: - self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', - request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['get_build_trigger'] - - @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - cloudbuild.ListBuildTriggersResponse]: - r"""Return a callable for the list build triggers method over gRPC. - - Lists existing ``BuildTrigger``\ s. - - This API is experimental. - - Returns: - Callable[[~.ListBuildTriggersRequest], - ~.ListBuildTriggersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_build_triggers' not in self._stubs: - self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', - request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, - response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, - ) - return self._stubs['list_build_triggers'] - - @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete build trigger method over gRPC. - - Deletes a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - Returns: - Callable[[~.DeleteBuildTriggerRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_build_trigger' not in self._stubs: - self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', - request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_build_trigger'] - - @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - cloudbuild.BuildTrigger]: - r"""Return a callable for the update build trigger method over gRPC. - - Updates a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - Returns: - Callable[[~.UpdateBuildTriggerRequest], - ~.BuildTrigger]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_build_trigger' not in self._stubs: - self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', - request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['update_build_trigger'] - - @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - operations_pb2.Operation]: - r"""Return a callable for the run build trigger method over gRPC. - - Runs a ``BuildTrigger`` at a particular source revision. - - To run a regional or global trigger, use the POST request that - includes the location endpoint in the path (ex. - v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). - The POST request that does not include the location endpoint in - the path can only be used when running global triggers. - - Returns: - Callable[[~.RunBuildTriggerRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'run_build_trigger' not in self._stubs: - self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', - request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['run_build_trigger'] - - @property - def receive_trigger_webhook(self) -> Callable[ - [cloudbuild.ReceiveTriggerWebhookRequest], - cloudbuild.ReceiveTriggerWebhookResponse]: - r"""Return a callable for the receive trigger webhook method over gRPC. - - ReceiveTriggerWebhook [Experimental] is called when the API - receives a webhook request targeted at a specific trigger. - - Returns: - Callable[[~.ReceiveTriggerWebhookRequest], - ~.ReceiveTriggerWebhookResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'receive_trigger_webhook' not in self._stubs: - self._stubs['receive_trigger_webhook'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ReceiveTriggerWebhook', - request_serializer=cloudbuild.ReceiveTriggerWebhookRequest.serialize, - response_deserializer=cloudbuild.ReceiveTriggerWebhookResponse.deserialize, - ) - return self._stubs['receive_trigger_webhook'] - - @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - operations_pb2.Operation]: - r"""Return a callable for the create worker pool method over gRPC. - - Creates a ``WorkerPool``. - - Returns: - Callable[[~.CreateWorkerPoolRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_worker_pool' not in self._stubs: - self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', - request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_worker_pool'] - - @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - cloudbuild.WorkerPool]: - r"""Return a callable for the get worker pool method over gRPC. - - Returns details of a ``WorkerPool``. - - Returns: - Callable[[~.GetWorkerPoolRequest], - ~.WorkerPool]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_worker_pool' not in self._stubs: - self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', - request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, - response_deserializer=cloudbuild.WorkerPool.deserialize, - ) - return self._stubs['get_worker_pool'] - - @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete worker pool method over gRPC. - - Deletes a ``WorkerPool``. - - Returns: - Callable[[~.DeleteWorkerPoolRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_worker_pool' not in self._stubs: - self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', - request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_worker_pool'] - - @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - operations_pb2.Operation]: - r"""Return a callable for the update worker pool method over gRPC. - - Updates a ``WorkerPool``. - - Returns: - Callable[[~.UpdateWorkerPoolRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_worker_pool' not in self._stubs: - self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', - request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_worker_pool'] - - @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - cloudbuild.ListWorkerPoolsResponse]: - r"""Return a callable for the list worker pools method over gRPC. - - Lists ``WorkerPool``\ s. - - Returns: - Callable[[~.ListWorkerPoolsRequest], - ~.ListWorkerPoolsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_worker_pools' not in self._stubs: - self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', - request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, - response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, - ) - return self._stubs['list_worker_pools'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'CloudBuildGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py deleted file mode 100644 index e9f45881..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ /dev/null @@ -1,792 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .grpc import CloudBuildGrpcTransport - - -class CloudBuildGrpcAsyncIOTransport(CloudBuildTransport): - """gRPC AsyncIO backend transport for CloudBuild. - - Creates and manages builds on Google Cloud Platform. - - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. 
If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. 
- """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create build method over gRPC. - - Starts a build with the specified configuration. - - This method returns a long-running ``Operation``, which includes - the build ID. Pass the build ID to ``GetBuild`` to determine the - build status (such as ``SUCCESS`` or ``FAILURE``). - - Returns: - Callable[[~.CreateBuildRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_build' not in self._stubs: - self._stubs['create_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', - request_serializer=cloudbuild.CreateBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_build'] - - @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - Awaitable[cloudbuild.Build]]: - r"""Return a callable for the get build method over gRPC. - - Returns information about a previously requested build. - - The ``Build`` that is returned includes its status (such as - ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing - information. - - Returns: - Callable[[~.GetBuildRequest], - Awaitable[~.Build]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_build' not in self._stubs: - self._stubs['get_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', - request_serializer=cloudbuild.GetBuildRequest.serialize, - response_deserializer=cloudbuild.Build.deserialize, - ) - return self._stubs['get_build'] - - @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - Awaitable[cloudbuild.ListBuildsResponse]]: - r"""Return a callable for the list builds method over gRPC. - - Lists previously requested builds. - Previously requested builds may still be in-progress, or - may have finished successfully or unsuccessfully. - - Returns: - Callable[[~.ListBuildsRequest], - Awaitable[~.ListBuildsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_builds' not in self._stubs: - self._stubs['list_builds'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', - request_serializer=cloudbuild.ListBuildsRequest.serialize, - response_deserializer=cloudbuild.ListBuildsResponse.deserialize, - ) - return self._stubs['list_builds'] - - @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - Awaitable[cloudbuild.Build]]: - r"""Return a callable for the cancel build method over gRPC. - - Cancels a build in progress. - - Returns: - Callable[[~.CancelBuildRequest], - Awaitable[~.Build]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'cancel_build' not in self._stubs: - self._stubs['cancel_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', - request_serializer=cloudbuild.CancelBuildRequest.serialize, - response_deserializer=cloudbuild.Build.deserialize, - ) - return self._stubs['cancel_build'] - - @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the retry build method over gRPC. - - Creates a new build based on the specified build. - - This method creates a new build using the original build - request, which may or may not result in an identical build. - - For triggered builds: - - - Triggered builds resolve to a precise revision; therefore a - retry of a triggered build will result in a build that uses - the same revision. - - For non-triggered builds that specify ``RepoSource``: - - - If the original build built from the tip of a branch, the - retried build will build from the tip of that branch, which - may not be the same revision as the original build. - - If the original build specified a commit sha or revision ID, - the retried build will use the identical source. - - For builds that specify ``StorageSource``: - - - If the original build pulled source from Cloud Storage - without specifying the generation of the object, the new - build will use the current object, which may be different - from the original build source. - - If the original build pulled source from Cloud Storage and - specified the generation of the object, the new build will - attempt to use the same object, which may or may not be - available depending on the bucket's lifecycle management - settings. - - Returns: - Callable[[~.RetryBuildRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'retry_build' not in self._stubs: - self._stubs['retry_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', - request_serializer=cloudbuild.RetryBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['retry_build'] - - @property - def approve_build(self) -> Callable[ - [cloudbuild.ApproveBuildRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the approve build method over gRPC. - - Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. - - If rejected, the returned LRO will be immediately done. - - Returns: - Callable[[~.ApproveBuildRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'approve_build' not in self._stubs: - self._stubs['approve_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ApproveBuild', - request_serializer=cloudbuild.ApproveBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['approve_build'] - - @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: - r"""Return a callable for the create build trigger method over gRPC. - - Creates a new ``BuildTrigger``. - - This API is experimental. - - Returns: - Callable[[~.CreateBuildTriggerRequest], - Awaitable[~.BuildTrigger]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_build_trigger' not in self._stubs: - self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', - request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['create_build_trigger'] - - @property - def get_build_trigger(self) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: - r"""Return a callable for the get build trigger method over gRPC. - - Returns information about a ``BuildTrigger``. - - This API is experimental. - - Returns: - Callable[[~.GetBuildTriggerRequest], - Awaitable[~.BuildTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_build_trigger' not in self._stubs: - self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', - request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['get_build_trigger'] - - @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - Awaitable[cloudbuild.ListBuildTriggersResponse]]: - r"""Return a callable for the list build triggers method over gRPC. - - Lists existing ``BuildTrigger``\ s. - - This API is experimental. 
- - Returns: - Callable[[~.ListBuildTriggersRequest], - Awaitable[~.ListBuildTriggersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_build_triggers' not in self._stubs: - self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', - request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, - response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, - ) - return self._stubs['list_build_triggers'] - - @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete build trigger method over gRPC. - - Deletes a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - Returns: - Callable[[~.DeleteBuildTriggerRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_build_trigger' not in self._stubs: - self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', - request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_build_trigger'] - - @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: - r"""Return a callable for the update build trigger method over gRPC. 
- - Updates a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - Returns: - Callable[[~.UpdateBuildTriggerRequest], - Awaitable[~.BuildTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_build_trigger' not in self._stubs: - self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', - request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['update_build_trigger'] - - @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the run build trigger method over gRPC. - - Runs a ``BuildTrigger`` at a particular source revision. - - To run a regional or global trigger, use the POST request that - includes the location endpoint in the path (ex. - v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). - The POST request that does not include the location endpoint in - the path can only be used when running global triggers. - - Returns: - Callable[[~.RunBuildTriggerRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'run_build_trigger' not in self._stubs: - self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', - request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['run_build_trigger'] - - @property - def receive_trigger_webhook(self) -> Callable[ - [cloudbuild.ReceiveTriggerWebhookRequest], - Awaitable[cloudbuild.ReceiveTriggerWebhookResponse]]: - r"""Return a callable for the receive trigger webhook method over gRPC. - - ReceiveTriggerWebhook [Experimental] is called when the API - receives a webhook request targeted at a specific trigger. - - Returns: - Callable[[~.ReceiveTriggerWebhookRequest], - Awaitable[~.ReceiveTriggerWebhookResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'receive_trigger_webhook' not in self._stubs: - self._stubs['receive_trigger_webhook'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ReceiveTriggerWebhook', - request_serializer=cloudbuild.ReceiveTriggerWebhookRequest.serialize, - response_deserializer=cloudbuild.ReceiveTriggerWebhookResponse.deserialize, - ) - return self._stubs['receive_trigger_webhook'] - - @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create worker pool method over gRPC. - - Creates a ``WorkerPool``. - - Returns: - Callable[[~.CreateWorkerPoolRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_worker_pool' not in self._stubs: - self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', - request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_worker_pool'] - - @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - Awaitable[cloudbuild.WorkerPool]]: - r"""Return a callable for the get worker pool method over gRPC. - - Returns details of a ``WorkerPool``. - - Returns: - Callable[[~.GetWorkerPoolRequest], - Awaitable[~.WorkerPool]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_worker_pool' not in self._stubs: - self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', - request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, - response_deserializer=cloudbuild.WorkerPool.deserialize, - ) - return self._stubs['get_worker_pool'] - - @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete worker pool method over gRPC. - - Deletes a ``WorkerPool``. - - Returns: - Callable[[~.DeleteWorkerPoolRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_worker_pool' not in self._stubs: - self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', - request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_worker_pool'] - - @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update worker pool method over gRPC. - - Updates a ``WorkerPool``. - - Returns: - Callable[[~.UpdateWorkerPoolRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_worker_pool' not in self._stubs: - self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', - request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_worker_pool'] - - @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - Awaitable[cloudbuild.ListWorkerPoolsResponse]]: - r"""Return a callable for the list worker pools method over gRPC. - - Lists ``WorkerPool``\ s. - - Returns: - Callable[[~.ListWorkerPoolsRequest], - Awaitable[~.ListWorkerPoolsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_worker_pools' not in self._stubs: - self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', - request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, - response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, - ) - return self._stubs['list_worker_pools'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'CloudBuildGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py deleted file mode 100644 index 8d6d8492..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py +++ /dev/null @@ -1,2419 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CloudBuildRestInterceptor: - """Interceptor for CloudBuild. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CloudBuildRestTransport. - - .. 
code-block:: python - class MyCustomCloudBuildInterceptor(CloudBuildRestInterceptor): - def pre_approve_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_approve_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_cancel_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_cancel_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_build_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_worker_pool(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_worker_pool(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_worker_pool(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_worker_pool(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return 
request, metadata - - def post_get_build_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_worker_pool(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_worker_pool(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_builds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_builds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_build_triggers(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_build_triggers(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_worker_pools(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_worker_pools(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_receive_trigger_webhook(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_receive_trigger_webhook(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_retry_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_retry_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_run_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_build_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_build_trigger(self, 
response): - logging.log(f"Received response: {response}") - return response - - def pre_update_worker_pool(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_worker_pool(self, response): - logging.log(f"Received response: {response}") - return response - - transport = CloudBuildRestTransport(interceptor=MyCustomCloudBuildInterceptor()) - client = CloudBuildClient(transport=transport) - - - """ - def pre_approve_build(self, request: cloudbuild.ApproveBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ApproveBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for approve_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_approve_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for approve_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_cancel_build(self, request: cloudbuild.CancelBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CancelBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_cancel_build(self, response: cloudbuild.Build) -> cloudbuild.Build: - """Post-rpc interceptor for cancel_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. 
- """ - return response - def pre_create_build(self, request: cloudbuild.CreateBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_create_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_create_build_trigger(self, request: cloudbuild.CreateBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_create_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: - """Post-rpc interceptor for create_build_trigger - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_create_worker_pool(self, request: cloudbuild.CreateWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateWorkerPoolRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_worker_pool - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_create_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_worker_pool - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_delete_build_trigger(self, request: cloudbuild.DeleteBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.DeleteBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def pre_delete_worker_pool(self, request: cloudbuild.DeleteWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.DeleteWorkerPoolRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_worker_pool - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_delete_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_worker_pool - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_get_build(self, request: cloudbuild.GetBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_get_build(self, response: cloudbuild.Build) -> cloudbuild.Build: - """Post-rpc interceptor for get_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_get_build_trigger(self, request: cloudbuild.GetBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_get_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: - """Post-rpc interceptor for get_build_trigger - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_get_worker_pool(self, request: cloudbuild.GetWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetWorkerPoolRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_worker_pool - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_get_worker_pool(self, response: cloudbuild.WorkerPool) -> cloudbuild.WorkerPool: - """Post-rpc interceptor for get_worker_pool - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. 
- """ - return response - def pre_list_builds(self, request: cloudbuild.ListBuildsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListBuildsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_builds - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_list_builds(self, response: cloudbuild.ListBuildsResponse) -> cloudbuild.ListBuildsResponse: - """Post-rpc interceptor for list_builds - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_list_build_triggers(self, request: cloudbuild.ListBuildTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListBuildTriggersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_build_triggers - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_list_build_triggers(self, response: cloudbuild.ListBuildTriggersResponse) -> cloudbuild.ListBuildTriggersResponse: - """Post-rpc interceptor for list_build_triggers - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_list_worker_pools(self, request: cloudbuild.ListWorkerPoolsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListWorkerPoolsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_worker_pools - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_list_worker_pools(self, response: cloudbuild.ListWorkerPoolsResponse) -> cloudbuild.ListWorkerPoolsResponse: - """Post-rpc interceptor for list_worker_pools - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_receive_trigger_webhook(self, request: cloudbuild.ReceiveTriggerWebhookRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ReceiveTriggerWebhookRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for receive_trigger_webhook - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_receive_trigger_webhook(self, response: cloudbuild.ReceiveTriggerWebhookResponse) -> cloudbuild.ReceiveTriggerWebhookResponse: - """Post-rpc interceptor for receive_trigger_webhook - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_retry_build(self, request: cloudbuild.RetryBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.RetryBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for retry_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_retry_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for retry_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. 
- """ - return response - def pre_run_build_trigger(self, request: cloudbuild.RunBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.RunBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for run_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_run_build_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for run_build_trigger - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_update_build_trigger(self, request: cloudbuild.UpdateBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.UpdateBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_update_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: - """Post-rpc interceptor for update_build_trigger - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_update_worker_pool(self, request: cloudbuild.UpdateWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.UpdateWorkerPoolRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_worker_pool - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_update_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_worker_pool - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CloudBuildRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CloudBuildRestInterceptor - - -class CloudBuildRestTransport(CloudBuildTransport): - """REST backend transport for CloudBuild. - - Creates and manages builds on Google Cloud Platform. - - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[CloudBuildRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CloudBuildRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=operations/**}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=operations/**}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _ApproveBuild(CloudBuildRestStub): - def __hash__(self): - return hash("ApproveBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.ApproveBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the approve build method over HTTP. - - Args: - request (~.cloudbuild.ApproveBuildRequest): - The request object. Request to approve or reject a - pending build. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/builds/*}:approve', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/builds/*}:approve', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_approve_build(request, metadata) - pb_request = cloudbuild.ApproveBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_approve_build(resp) - return resp - - class _CancelBuild(CloudBuildRestStub): - def __hash__(self): - return hash("CancelBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.CancelBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.Build: - r"""Call the cancel build method over HTTP. - - Args: - request (~.cloudbuild.CancelBuildRequest): - The request object. Request to cancel an ongoing build. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.Build: - A build resource in the Cloud Build API. - - At a high level, a ``Build`` describes where to find - source code, how to build it (for example, the builder - image to run on the source), and where to store the - built artifacts. - - Fields can include the following variables, which will - be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified by - RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. 
- - $REVISION_ID or $COMMIT_SHA: the commit SHA specified - by RepoSource or resolved from the specified branch - or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/builds/{id}:cancel', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/builds/*}:cancel', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_cancel_build(request, metadata) - pb_request = cloudbuild.CancelBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.Build() - pb_resp = cloudbuild.Build.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_cancel_build(resp) - return resp - - class _CreateBuild(CloudBuildRestStub): - def __hash__(self): - return hash("CreateBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.CreateBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create build method over HTTP. - - Args: - request (~.cloudbuild.CreateBuildRequest): - The request object. Request to create a new build. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/builds', - 'body': 'build', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/builds', - 'body': 'build', - }, - ] - request, metadata = self._interceptor.pre_create_build(request, metadata) - pb_request = cloudbuild.CreateBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_build(resp) - return resp - - class _CreateBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("CreateBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.CreateBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.BuildTrigger: - r"""Call the create build trigger method over HTTP. - - Args: - request (~.cloudbuild.CreateBuildTriggerRequest): - The request object. Request to create a new ``BuildTrigger``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/triggers', - 'body': 'trigger', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - 'body': 'trigger', - }, - ] - request, metadata = self._interceptor.pre_create_build_trigger(request, metadata) - pb_request = cloudbuild.CreateBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.BuildTrigger() - pb_resp = cloudbuild.BuildTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_build_trigger(resp) - return resp - - class _CreateWorkerPool(CloudBuildRestStub): - def __hash__(self): - return hash("CreateWorkerPool") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "workerPoolId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.CreateWorkerPoolRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create worker pool method over HTTP. - - Args: - request (~.cloudbuild.CreateWorkerPoolRequest): - The request object. Request to create a new ``WorkerPool``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/workerPools', - 'body': 'worker_pool', - }, - ] - request, metadata = self._interceptor.pre_create_worker_pool(request, metadata) - pb_request = cloudbuild.CreateWorkerPoolRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_worker_pool(resp) - return resp - - class _DeleteBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("DeleteBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.DeleteBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete build trigger method over HTTP. - - Args: - request (~.cloudbuild.DeleteBuildTriggerRequest): - The request object. Request to delete a ``BuildTrigger``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_build_trigger(request, metadata) - pb_request = cloudbuild.DeleteBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteWorkerPool(CloudBuildRestStub): - def __hash__(self): - return hash("DeleteWorkerPool") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.DeleteWorkerPoolRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete worker pool method over HTTP. 
- - Args: - request (~.cloudbuild.DeleteWorkerPoolRequest): - The request object. Request to delete a ``WorkerPool``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/workerPools/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_worker_pool(request, metadata) - pb_request = cloudbuild.DeleteWorkerPoolRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_worker_pool(resp) - return resp - - class _GetBuild(CloudBuildRestStub): - def __hash__(self): - return hash("GetBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.GetBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.Build: - r"""Call the get build method over HTTP. - - Args: - request (~.cloudbuild.GetBuildRequest): - The request object. Request to get a build. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.Build: - A build resource in the Cloud Build API. - - At a high level, a ``Build`` describes where to find - source code, how to build it (for example, the builder - image to run on the source), and where to store the - built artifacts. - - Fields can include the following variables, which will - be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified by - RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. 
- - $REVISION_ID or $COMMIT_SHA: the commit SHA specified - by RepoSource or resolved from the specified branch - or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/builds/{id}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/builds/*}', - }, - ] - request, metadata = self._interceptor.pre_get_build(request, metadata) - pb_request = cloudbuild.GetBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.Build() - pb_resp = cloudbuild.Build.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_build(resp) - return resp - - class _GetBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("GetBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.GetBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.BuildTrigger: - r"""Call the get build trigger method over HTTP. - - Args: - request (~.cloudbuild.GetBuildTriggerRequest): - The request object. Returns the ``BuildTrigger`` with the specified ID. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, - ] - request, metadata = self._interceptor.pre_get_build_trigger(request, metadata) - pb_request = cloudbuild.GetBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.BuildTrigger() - pb_resp = cloudbuild.BuildTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_build_trigger(resp) - return resp - - class _GetWorkerPool(CloudBuildRestStub): - def __hash__(self): - return hash("GetWorkerPool") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.GetWorkerPoolRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.WorkerPool: - r"""Call the get worker pool method over HTTP. - - Args: - request (~.cloudbuild.GetWorkerPoolRequest): - The request object. Request to get a ``WorkerPool`` with the specified name. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.WorkerPool: - Configuration for a ``WorkerPool``. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a ``WorkerPool`` to run your - builds. Private ``WorkerPool``\ s give your builds - access to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see `Private - pools - overview `__. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/workerPools/*}', - }, - ] - request, metadata = self._interceptor.pre_get_worker_pool(request, metadata) - pb_request = cloudbuild.GetWorkerPoolRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.WorkerPool() - pb_resp = cloudbuild.WorkerPool.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_worker_pool(resp) - return resp - - class _ListBuilds(CloudBuildRestStub): - def __hash__(self): - return hash("ListBuilds") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.ListBuildsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.ListBuildsResponse: - r"""Call the list builds method over HTTP. - - Args: - request (~.cloudbuild.ListBuildsRequest): - The request object. Request to list builds. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.ListBuildsResponse: - Response including listed builds. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/builds', - }, -{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/builds', - }, - ] - request, metadata = self._interceptor.pre_list_builds(request, metadata) - pb_request = cloudbuild.ListBuildsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.ListBuildsResponse() - pb_resp = cloudbuild.ListBuildsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_builds(resp) - return resp - - class _ListBuildTriggers(CloudBuildRestStub): - def __hash__(self): - return hash("ListBuildTriggers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.ListBuildTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.ListBuildTriggersResponse: - r"""Call the list build triggers method over HTTP. - - Args: - request (~.cloudbuild.ListBuildTriggersRequest): - The request object. Request to list existing ``BuildTriggers``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.ListBuildTriggersResponse: - Response containing existing ``BuildTriggers``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/triggers', - }, -{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - }, - ] - request, metadata = self._interceptor.pre_list_build_triggers(request, metadata) - pb_request = cloudbuild.ListBuildTriggersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.ListBuildTriggersResponse() - pb_resp = cloudbuild.ListBuildTriggersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_build_triggers(resp) - return resp - - class _ListWorkerPools(CloudBuildRestStub): - def __hash__(self): - return hash("ListWorkerPools") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.ListWorkerPoolsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.ListWorkerPoolsResponse: - r"""Call the list worker pools method over HTTP. - - Args: - request (~.cloudbuild.ListWorkerPoolsRequest): - The request object. Request to list ``WorkerPool``\ s. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.ListWorkerPoolsResponse: - Response containing existing ``WorkerPools``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/workerPools', - }, - ] - request, metadata = self._interceptor.pre_list_worker_pools(request, metadata) - pb_request = cloudbuild.ListWorkerPoolsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.ListWorkerPoolsResponse() - pb_resp = cloudbuild.ListWorkerPoolsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_worker_pools(resp) - return resp - - class _ReceiveTriggerWebhook(CloudBuildRestStub): - def __hash__(self): - return hash("ReceiveTriggerWebhook") - - def __call__(self, - request: cloudbuild.ReceiveTriggerWebhookRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.ReceiveTriggerWebhookResponse: - r"""Call the receive trigger webhook method over HTTP. 
- - Args: - request (~.cloudbuild.ReceiveTriggerWebhookRequest): - The request object. ReceiveTriggerWebhookRequest [Experimental] is the - request object accepted by the ReceiveTriggerWebhook - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.ReceiveTriggerWebhookResponse: - ReceiveTriggerWebhookResponse [Experimental] is the - response object for the ReceiveTriggerWebhook method. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/triggers/{trigger}:webhook', - 'body': 'body', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}:webhook', - 'body': 'body', - }, - ] - request, metadata = self._interceptor.pre_receive_trigger_webhook(request, metadata) - pb_request = cloudbuild.ReceiveTriggerWebhookRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.ReceiveTriggerWebhookResponse() - pb_resp = cloudbuild.ReceiveTriggerWebhookResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_receive_trigger_webhook(resp) - return resp - - class _RetryBuild(CloudBuildRestStub): - def __hash__(self): - return hash("RetryBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.RetryBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the retry build method over HTTP. - - Args: - request (~.cloudbuild.RetryBuildRequest): - The request object. Specifies a build to retry. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/builds/{id}:retry', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/builds/*}:retry', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_retry_build(request, metadata) - pb_request = cloudbuild.RetryBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_retry_build(resp) - return resp - - class _RunBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("RunBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.RunBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the run build trigger method over HTTP. - - Args: - request (~.cloudbuild.RunBuildTriggerRequest): - The request object. Specifies a build trigger to run and - the source to use. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}:run', - 'body': 'source', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}:run', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_run_build_trigger(request, metadata) - pb_request = cloudbuild.RunBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_run_build_trigger(resp) - return resp - - class _UpdateBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("UpdateBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.UpdateBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.BuildTrigger: - r"""Call the update build trigger method over HTTP. - - Args: - request (~.cloudbuild.UpdateBuildTriggerRequest): - The request object. Request to update an existing ``BuildTrigger``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', - 'body': 'trigger', - }, -{ - 'method': 'patch', - 'uri': '/v1/{trigger.resource_name=projects/*/locations/*/triggers/*}', - 'body': 'trigger', - }, - ] - request, metadata = self._interceptor.pre_update_build_trigger(request, metadata) - pb_request = cloudbuild.UpdateBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.BuildTrigger() - pb_resp = cloudbuild.BuildTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_build_trigger(resp) - return resp - - class _UpdateWorkerPool(CloudBuildRestStub): - def __hash__(self): - return hash("UpdateWorkerPool") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.UpdateWorkerPoolRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the update worker pool method over HTTP. - - Args: - request (~.cloudbuild.UpdateWorkerPoolRequest): - The request object. Request to update a ``WorkerPool``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}', - 'body': 'worker_pool', - }, - ] - request, metadata = self._interceptor.pre_update_worker_pool(request, metadata) - pb_request = cloudbuild.UpdateWorkerPoolRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_worker_pool(resp) - return resp - - @property - def approve_build(self) -> Callable[ - [cloudbuild.ApproveBuildRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ApproveBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - cloudbuild.Build]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - cloudbuild.BuildTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateWorkerPool(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteWorkerPool(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - cloudbuild.Build]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_build_trigger(self) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], - cloudbuild.BuildTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - cloudbuild.WorkerPool]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetWorkerPool(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - cloudbuild.ListBuildsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListBuilds(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - cloudbuild.ListBuildTriggersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBuildTriggers(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - cloudbuild.ListWorkerPoolsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListWorkerPools(self._session, self._host, self._interceptor) # type: ignore - - @property - def receive_trigger_webhook(self) -> Callable[ - [cloudbuild.ReceiveTriggerWebhookRequest], - cloudbuild.ReceiveTriggerWebhookResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ReceiveTriggerWebhook(self._session, self._host, self._interceptor) # type: ignore - - @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RetryBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._RunBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - cloudbuild.BuildTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateWorkerPool(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'CloudBuildRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py deleted file mode 100644 index fab30741..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .cloudbuild import ( - ApprovalConfig, - ApprovalResult, - ApproveBuildRequest, - ArtifactResult, - Artifacts, - Build, - BuildApproval, - BuildOperationMetadata, - BuildOptions, - BuildStep, - BuildTrigger, - BuiltImage, - CancelBuildRequest, - CreateBuildRequest, - CreateBuildTriggerRequest, - CreateWorkerPoolOperationMetadata, - CreateWorkerPoolRequest, - DeleteBuildTriggerRequest, - DeleteWorkerPoolOperationMetadata, - DeleteWorkerPoolRequest, - FileHashes, - GetBuildRequest, - GetBuildTriggerRequest, - GetWorkerPoolRequest, - GitHubEventsConfig, - GitSource, - Hash, - InlineSecret, - ListBuildsRequest, - ListBuildsResponse, - ListBuildTriggersRequest, - ListBuildTriggersResponse, - ListWorkerPoolsRequest, - ListWorkerPoolsResponse, - PrivatePoolV1Config, - PubsubConfig, - PullRequestFilter, - PushFilter, - ReceiveTriggerWebhookRequest, - ReceiveTriggerWebhookResponse, - RepositoryEventConfig, - RepoSource, - Results, - RetryBuildRequest, - RunBuildTriggerRequest, - Secret, - SecretManagerSecret, - Secrets, - Source, - SourceProvenance, - StorageSource, - StorageSourceManifest, - TimeSpan, - UpdateBuildTriggerRequest, - UpdateWorkerPoolOperationMetadata, - UpdateWorkerPoolRequest, - UploadedMavenArtifact, - UploadedNpmPackage, - UploadedPythonPackage, - Volume, - WebhookConfig, - WorkerPool, -) - -__all__ = ( - 'ApprovalConfig', - 'ApprovalResult', - 'ApproveBuildRequest', - 'ArtifactResult', - 'Artifacts', - 'Build', - 'BuildApproval', - 'BuildOperationMetadata', - 'BuildOptions', - 'BuildStep', - 'BuildTrigger', - 'BuiltImage', - 'CancelBuildRequest', - 'CreateBuildRequest', - 'CreateBuildTriggerRequest', - 'CreateWorkerPoolOperationMetadata', - 'CreateWorkerPoolRequest', - 'DeleteBuildTriggerRequest', - 'DeleteWorkerPoolOperationMetadata', - 'DeleteWorkerPoolRequest', - 'FileHashes', - 'GetBuildRequest', - 'GetBuildTriggerRequest', - 'GetWorkerPoolRequest', - 'GitHubEventsConfig', - 'GitSource', - 'Hash', - 'InlineSecret', - 'ListBuildsRequest', - 
'ListBuildsResponse', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', - 'PrivatePoolV1Config', - 'PubsubConfig', - 'PullRequestFilter', - 'PushFilter', - 'ReceiveTriggerWebhookRequest', - 'ReceiveTriggerWebhookResponse', - 'RepositoryEventConfig', - 'RepoSource', - 'Results', - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'Secret', - 'SecretManagerSecret', - 'Secrets', - 'Source', - 'SourceProvenance', - 'StorageSource', - 'StorageSourceManifest', - 'TimeSpan', - 'UpdateBuildTriggerRequest', - 'UpdateWorkerPoolOperationMetadata', - 'UpdateWorkerPoolRequest', - 'UploadedMavenArtifact', - 'UploadedNpmPackage', - 'UploadedPythonPackage', - 'Volume', - 'WebhookConfig', - 'WorkerPool', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py deleted file mode 100644 index 838474d2..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ /dev/null @@ -1,3680 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.api import httpbody_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.devtools.cloudbuild.v1', - manifest={ - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'StorageSource', - 'GitSource', - 'RepoSource', - 'StorageSourceManifest', - 'Source', - 'BuiltImage', - 'UploadedPythonPackage', - 'UploadedMavenArtifact', - 'UploadedNpmPackage', - 'BuildStep', - 'Volume', - 'Results', - 'ArtifactResult', - 'Build', - 'Artifacts', - 'TimeSpan', - 'BuildOperationMetadata', - 'SourceProvenance', - 'FileHashes', - 'Hash', - 'Secrets', - 'InlineSecret', - 'SecretManagerSecret', - 'Secret', - 'CreateBuildRequest', - 'GetBuildRequest', - 'ListBuildsRequest', - 'ListBuildsResponse', - 'CancelBuildRequest', - 'ApproveBuildRequest', - 'BuildApproval', - 'ApprovalConfig', - 'ApprovalResult', - 'BuildTrigger', - 'RepositoryEventConfig', - 'GitHubEventsConfig', - 'PubsubConfig', - 'WebhookConfig', - 'PullRequestFilter', - 'PushFilter', - 'CreateBuildTriggerRequest', - 'GetBuildTriggerRequest', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'DeleteBuildTriggerRequest', - 'UpdateBuildTriggerRequest', - 'BuildOptions', - 'ReceiveTriggerWebhookRequest', - 'ReceiveTriggerWebhookResponse', - 'WorkerPool', - 'PrivatePoolV1Config', - 'CreateWorkerPoolRequest', - 'GetWorkerPoolRequest', - 'DeleteWorkerPoolRequest', - 'UpdateWorkerPoolRequest', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', - 'CreateWorkerPoolOperationMetadata', - 'UpdateWorkerPoolOperationMetadata', - 'DeleteWorkerPoolOperationMetadata', - }, -) - - -class RetryBuildRequest(proto.Message): - r"""Specifies a build to retry. 
- - Attributes: - name (str): - The name of the ``Build`` to retry. Format: - ``projects/{project}/locations/{location}/builds/{build}`` - project_id (str): - Required. ID of the project. - id (str): - Required. Build ID of the original build. - """ - - name: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RunBuildTriggerRequest(proto.Message): - r"""Specifies a build trigger to run and the source to use. - - Attributes: - name (str): - The name of the ``Trigger`` to run. Format: - ``projects/{project}/locations/{location}/triggers/{trigger}`` - project_id (str): - Required. ID of the project. - trigger_id (str): - Required. ID of the trigger. - source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - Source to build against this trigger. - Branch and tag names cannot consist of regular - expressions. - """ - - name: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger_id: str = proto.Field( - proto.STRING, - number=2, - ) - source: 'RepoSource' = proto.Field( - proto.MESSAGE, - number=3, - message='RepoSource', - ) - - -class StorageSource(proto.Message): - r"""Location of the source in an archive file in Cloud Storage. - - Attributes: - bucket (str): - Cloud Storage bucket containing the source (see `Bucket Name - Requirements `__). - object_ (str): - Cloud Storage object containing the source. - - This object must be a zipped (``.zip``) or gzipped archive - file (``.tar.gz``) containing source to build. - generation (int): - Cloud Storage generation for the object. If - the generation is omitted, the latest generation - will be used. 
- """ - - bucket: str = proto.Field( - proto.STRING, - number=1, - ) - object_: str = proto.Field( - proto.STRING, - number=2, - ) - generation: int = proto.Field( - proto.INT64, - number=3, - ) - - -class GitSource(proto.Message): - r"""Location of the source in any accessible Git repository. - - Attributes: - url (str): - Location of the Git repo to build. - - This will be used as a ``git remote``, see - https://git-scm.com/docs/git-remote. - dir_ (str): - Directory, relative to the source root, in which to run the - build. - - This must be a relative path. If a step's ``dir`` is - specified and is an absolute path, this value is ignored for - that step's execution. - revision (str): - The revision to fetch from the Git repository such as a - branch, a tag, a commit SHA, or any Git ref. - - Cloud Build uses ``git fetch`` to fetch the revision from - the Git repository; therefore make sure that the string you - provide for ``revision`` is parsable by the command. For - information on string values accepted by ``git fetch``, see - https://git-scm.com/docs/gitrevisions#_specifying_revisions. - For information on ``git fetch``, see - https://git-scm.com/docs/git-fetch. - """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - dir_: str = proto.Field( - proto.STRING, - number=5, - ) - revision: str = proto.Field( - proto.STRING, - number=6, - ) - - -class RepoSource(proto.Message): - r"""Location of the source in a Google Cloud Source Repository. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project_id (str): - ID of the project that owns the Cloud Source - Repository. If omitted, the project ID - requesting the build is assumed. 
- repo_name (str): - Name of the Cloud Source Repository. - branch_name (str): - Regex matching branches to build. - The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``revision``. - tag_name (str): - Regex matching tags to build. - The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``revision``. - commit_sha (str): - Explicit commit SHA to build. - - This field is a member of `oneof`_ ``revision``. - dir_ (str): - Directory, relative to the source root, in which to run the - build. - - This must be a relative path. If a step's ``dir`` is - specified and is an absolute path, this value is ignored for - that step's execution. - invert_regex (bool): - Only trigger a build if the revision regex - does NOT match the revision regex. - substitutions (MutableMapping[str, str]): - Substitutions to use in a triggered build. - Should only be used with RunBuildTrigger - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - repo_name: str = proto.Field( - proto.STRING, - number=2, - ) - branch_name: str = proto.Field( - proto.STRING, - number=3, - oneof='revision', - ) - tag_name: str = proto.Field( - proto.STRING, - number=4, - oneof='revision', - ) - commit_sha: str = proto.Field( - proto.STRING, - number=5, - oneof='revision', - ) - dir_: str = proto.Field( - proto.STRING, - number=7, - ) - invert_regex: bool = proto.Field( - proto.BOOL, - number=8, - ) - substitutions: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=9, - ) - - -class StorageSourceManifest(proto.Message): - r"""Location of the source manifest in Cloud Storage. This feature is in - Preview; see description - `here `__. 
- - Attributes: - bucket (str): - Cloud Storage bucket containing the source manifest (see - `Bucket Name - Requirements `__). - object_ (str): - Cloud Storage object containing the source - manifest. - This object must be a JSON file. - generation (int): - Cloud Storage generation for the object. If - the generation is omitted, the latest generation - will be used. - """ - - bucket: str = proto.Field( - proto.STRING, - number=1, - ) - object_: str = proto.Field( - proto.STRING, - number=2, - ) - generation: int = proto.Field( - proto.INT64, - number=3, - ) - - -class Source(proto.Message): - r"""Location of the source in a supported storage service. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource): - If provided, get the source from this - location in Cloud Storage. - - This field is a member of `oneof`_ ``source``. - repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - If provided, get the source from this - location in a Cloud Source Repository. - - This field is a member of `oneof`_ ``source``. - git_source (google.cloud.devtools.cloudbuild_v1.types.GitSource): - If provided, get the source from this Git - repository. - - This field is a member of `oneof`_ ``source``. - storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest): - If provided, get the source from this manifest in Cloud - Storage. This feature is in Preview; see description - `here `__. - - This field is a member of `oneof`_ ``source``. 
- """ - - storage_source: 'StorageSource' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='StorageSource', - ) - repo_source: 'RepoSource' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='RepoSource', - ) - git_source: 'GitSource' = proto.Field( - proto.MESSAGE, - number=5, - oneof='source', - message='GitSource', - ) - storage_source_manifest: 'StorageSourceManifest' = proto.Field( - proto.MESSAGE, - number=8, - oneof='source', - message='StorageSourceManifest', - ) - - -class BuiltImage(proto.Message): - r"""An image built by the pipeline. - - Attributes: - name (str): - Name used to push the container image to Google Container - Registry, as presented to ``docker push``. - digest (str): - Docker Registry 2.0 digest. - push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing the specified image. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - digest: str = proto.Field( - proto.STRING, - number=3, - ) - push_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=4, - message='TimeSpan', - ) - - -class UploadedPythonPackage(proto.Message): - r"""Artifact uploaded using the PythonPackage directive. - - Attributes: - uri (str): - URI of the uploaded artifact. - file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): - Hash types and values of the Python Artifact. - push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing the specified artifact. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - file_hashes: 'FileHashes' = proto.Field( - proto.MESSAGE, - number=2, - message='FileHashes', - ) - push_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=3, - message='TimeSpan', - ) - - -class UploadedMavenArtifact(proto.Message): - r"""A Maven artifact uploaded using the MavenArtifact directive. 
- - Attributes: - uri (str): - URI of the uploaded artifact. - file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): - Hash types and values of the Maven Artifact. - push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing the specified artifact. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - file_hashes: 'FileHashes' = proto.Field( - proto.MESSAGE, - number=2, - message='FileHashes', - ) - push_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=3, - message='TimeSpan', - ) - - -class UploadedNpmPackage(proto.Message): - r"""An npm package uploaded to Artifact Registry using the - NpmPackage directive. - - Attributes: - uri (str): - URI of the uploaded npm package. - file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): - Hash types and values of the npm package. - push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing the specified artifact. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - file_hashes: 'FileHashes' = proto.Field( - proto.MESSAGE, - number=2, - message='FileHashes', - ) - push_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=3, - message='TimeSpan', - ) - - -class BuildStep(proto.Message): - r"""A step in the build pipeline. - - Attributes: - name (str): - Required. The name of the container image that will run this - particular build step. - - If the image is available in the host's Docker daemon's - cache, it will be run directly. If not, the host will - attempt to pull the image first, using the builder service - account's credentials if necessary. - - The Docker daemon's cache will already have the latest - versions of all of the officially supported build steps - (https://github.com/GoogleCloudPlatform/cloud-builders). 
The - Docker daemon will also have cached many of the layers for - some popular images, like "ubuntu", "debian", but they will - be refreshed at the time you attempt to use them. - - If you built an image in a previous build step, it will be - stored in the host's Docker daemon's cache and is available - to use as the name for a later build step. - env (MutableSequence[str]): - A list of environment variable definitions to - be used when running a step. - The elements are of the form "KEY=VALUE" for the - environment variable "KEY" being given the value - "VALUE". - args (MutableSequence[str]): - A list of arguments that will be presented to the step when - it is started. - - If the image used to run the step's container has an - entrypoint, the ``args`` are used as arguments to that - entrypoint. If the image does not define an entrypoint, the - first element in args is used as the entrypoint, and the - remainder will be used as arguments. - dir_ (str): - Working directory to use when running this step's container. - - If this value is a relative path, it is relative to the - build's working directory. If this value is absolute, it may - be outside the build's working directory, in which case the - contents of the path may not be persisted across build step - executions, unless a ``volume`` for that path is specified. - - If the build specifies a ``RepoSource`` with ``dir`` and a - step with a ``dir``, which specifies an absolute path, the - ``RepoSource`` ``dir`` is ignored for the step's execution. - id (str): - Unique identifier for this build step, used in ``wait_for`` - to reference this build step as a dependency. - wait_for (MutableSequence[str]): - The ID(s) of the step(s) that this build step depends on. - This build step will not start until all the build steps in - ``wait_for`` have completed successfully. If ``wait_for`` is - empty, this build step will start when all previous build - steps in the ``Build.Steps`` list have completed - successfully. 
- entrypoint (str): - Entrypoint to be used instead of the build - step image's default entrypoint. If unset, the - image's default entrypoint is used. - secret_env (MutableSequence[str]): - A list of environment variables which are encrypted using a - Cloud Key Management Service crypto key. These values must - be specified in the build's ``Secret``. - volumes (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Volume]): - List of volumes to mount into the build step. - Each volume is created as an empty volume prior - to execution of the build step. Upon completion - of the build, volumes and their contents are - discarded. - - Using a named volume in only one step is not - valid as it is indicative of a build request - with an incorrect configuration. - timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - executing this build step. - pull_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pulling this build step's builder image only. - timeout (google.protobuf.duration_pb2.Duration): - Time limit for executing this build step. If - not defined, the step has no time limit and will - be allowed to continue to run until either it - completes or the build itself times out. - status (google.cloud.devtools.cloudbuild_v1.types.Build.Status): - Output only. Status of the build step. At - this time, build step status is only updated on - build completion; step status is not updated in - real-time as the build progresses. - allow_failure (bool): - Allow this build step to fail without failing the entire - build. - - If false, the entire build will fail if this step fails. - Otherwise, the build will succeed, but this step will still - have a failure status. Error information will be reported in - the failure_detail field. - exit_code (int): - Output only. Return code from running the - step. 
- allow_exit_codes (MutableSequence[int]): - Allow this build step to fail without failing the entire - build if and only if the exit code is one of the specified - codes. If allow_failure is also specified, this field will - take precedence. - script (str): - A shell script to be executed in the step. - When script is provided, the user cannot specify - the entrypoint or args. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - env: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - dir_: str = proto.Field( - proto.STRING, - number=4, - ) - id: str = proto.Field( - proto.STRING, - number=5, - ) - wait_for: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - entrypoint: str = proto.Field( - proto.STRING, - number=7, - ) - secret_env: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - volumes: MutableSequence['Volume'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='Volume', - ) - timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=10, - message='TimeSpan', - ) - pull_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=13, - message='TimeSpan', - ) - timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=11, - message=duration_pb2.Duration, - ) - status: 'Build.Status' = proto.Field( - proto.ENUM, - number=12, - enum='Build.Status', - ) - allow_failure: bool = proto.Field( - proto.BOOL, - number=14, - ) - exit_code: int = proto.Field( - proto.INT32, - number=16, - ) - allow_exit_codes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=18, - ) - script: str = proto.Field( - proto.STRING, - number=19, - ) - - -class Volume(proto.Message): - r"""Volume describes a Docker container volume which is mounted - into build steps in order to persist files across build step - execution. 
- - Attributes: - name (str): - Name of the volume to mount. - Volume names must be unique per build step and - must be valid names for Docker volumes. Each - named volume must be used by at least two build - steps. - path (str): - Path at which to mount the volume. - Paths must be absolute and cannot conflict with - other volume paths on the same build step or - with certain reserved volume paths. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - path: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Results(proto.Message): - r"""Artifacts created by the build pipeline. - - Attributes: - images (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuiltImage]): - Container images that were built as a part of - the build. - build_step_images (MutableSequence[str]): - List of build step digests, in the order - corresponding to build step indices. - artifact_manifest (str): - Path to the artifact manifest for - non-container artifacts uploaded to Cloud - Storage. Only populated when artifacts are - uploaded to Cloud Storage. - num_artifacts (int): - Number of non-container artifacts uploaded to - Cloud Storage. Only populated when artifacts are - uploaded to Cloud Storage. - build_step_outputs (MutableSequence[bytes]): - List of build step outputs, produced by builder images, in - the order corresponding to build step indices. - - `Cloud - Builders `__ - can produce this output by writing to - ``$BUILDER_OUTPUT/output``. Only the first 4KB of data is - stored. - artifact_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Time to push all non-container artifacts to - Cloud Storage. - python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedPythonPackage]): - Python artifacts uploaded to Artifact - Registry at the end of the build. 
- maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedMavenArtifact]): - Maven artifacts uploaded to Artifact Registry - at the end of the build. - npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedNpmPackage]): - Npm packages uploaded to Artifact Registry at - the end of the build. - """ - - images: MutableSequence['BuiltImage'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='BuiltImage', - ) - build_step_images: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - artifact_manifest: str = proto.Field( - proto.STRING, - number=4, - ) - num_artifacts: int = proto.Field( - proto.INT64, - number=5, - ) - build_step_outputs: MutableSequence[bytes] = proto.RepeatedField( - proto.BYTES, - number=6, - ) - artifact_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=7, - message='TimeSpan', - ) - python_packages: MutableSequence['UploadedPythonPackage'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='UploadedPythonPackage', - ) - maven_artifacts: MutableSequence['UploadedMavenArtifact'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='UploadedMavenArtifact', - ) - npm_packages: MutableSequence['UploadedNpmPackage'] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message='UploadedNpmPackage', - ) - - -class ArtifactResult(proto.Message): - r"""An artifact that was uploaded during a build. This - is a single record in the artifact manifest JSON file. - - Attributes: - location (str): - The path of an artifact in a Cloud Storage bucket, with the - generation number. For example, - ``gs://mybucket/path/to/output.jar#generation``. - file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.FileHashes]): - The file hash of the artifact. 
- """ - - location: str = proto.Field( - proto.STRING, - number=1, - ) - file_hash: MutableSequence['FileHashes'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='FileHashes', - ) - - -class Build(proto.Message): - r"""A build resource in the Cloud Build API. - - At a high level, a ``Build`` describes where to find source code, - how to build it (for example, the builder image to run on the - source), and where to store the built artifacts. - - Fields can include the following variables, which will be expanded - when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified by RepoSource. - - $BRANCH_NAME: the branch name specified by RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA specified by - RepoSource or resolved from the specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. - - Attributes: - name (str): - Output only. The 'Build' name with format: - ``projects/{project}/locations/{location}/builds/{build}``, - where {build} is a unique identifier generated by the - service. - id (str): - Output only. Unique identifier of the build. - project_id (str): - Output only. ID of the project. - status (google.cloud.devtools.cloudbuild_v1.types.Build.Status): - Output only. Status of the build. - status_detail (str): - Output only. Customer-readable message about - the current status. - source (google.cloud.devtools.cloudbuild_v1.types.Source): - The location of the source files to build. - steps (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildStep]): - Required. The operations to be performed on - the workspace. - results (google.cloud.devtools.cloudbuild_v1.types.Results): - Output only. Results of the build. 
- create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which the request to - create the build was received. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which execution of the - build was started. - finish_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which execution of the build was - finished. - - The difference between finish_time and start_time is the - duration of the build's execution. - timeout (google.protobuf.duration_pb2.Duration): - Amount of time that this build should be allowed to run, to - second granularity. If this amount of time elapses, work on - the build will cease and the build status will be - ``TIMEOUT``. - - ``timeout`` starts ticking from ``startTime``. - - Default time is 60 minutes. - images (MutableSequence[str]): - A list of images to be pushed upon the successful completion - of all build steps. - - The images are pushed using the builder service account's - credentials. - - The digests of the pushed images will be stored in the - ``Build`` resource's results field. - - If any of the images fail to be pushed, the build status is - marked ``FAILURE``. - queue_ttl (google.protobuf.duration_pb2.Duration): - TTL in queue for this build. If provided and the build is - enqueued longer than this value, the build will expire and - the build status will be ``EXPIRED``. - - The TTL starts ticking from create_time. - artifacts (google.cloud.devtools.cloudbuild_v1.types.Artifacts): - Artifacts produced by the build that should - be uploaded upon successful completion of all - build steps. - logs_bucket (str): - Cloud Storage bucket where logs should be written (see - `Bucket Name - Requirements `__). - Logs file names will be of the format - ``${logs_bucket}/log-${build_id}.txt``. - source_provenance (google.cloud.devtools.cloudbuild_v1.types.SourceProvenance): - Output only. A permanent fixed identifier for - source. - build_trigger_id (str): - Output only. 
The ID of the ``BuildTrigger`` that triggered - this build, if it was triggered automatically. - options (google.cloud.devtools.cloudbuild_v1.types.BuildOptions): - Special options for this build. - log_url (str): - Output only. URL to logs for this build in - Google Cloud Console. - substitutions (MutableMapping[str, str]): - Substitutions data for ``Build`` resource. - tags (MutableSequence[str]): - Tags for annotation of a ``Build``. These are not docker - tags. - secrets (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Secret]): - Secrets to decrypt using Cloud Key Management Service. Note: - Secret Manager is the recommended technique for managing - sensitive data with Cloud Build. Use ``available_secrets`` - to configure builds to access secrets from Secret Manager. - For instructions, see: - https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets - timing (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.TimeSpan]): - Output only. Stores timing information for phases of the - build. Valid keys are: - - - BUILD: time to execute all build steps. - - PUSH: time to push all artifacts including docker images - and non docker artifacts. - - FETCHSOURCE: time to fetch source. - - SETUPBUILD: time to set up build. - - If the build does not specify source or images, these keys - will not be included. - approval (google.cloud.devtools.cloudbuild_v1.types.BuildApproval): - Output only. Describes this build's approval - configuration, status, and result. - service_account (str): - IAM service account whose credentials will be used at build - runtime. Must be of the format - ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}``. ACCOUNT - can be email address or uniqueId of the service account. - available_secrets (google.cloud.devtools.cloudbuild_v1.types.Secrets): - Secrets and secret environment variables. - warnings (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Build.Warning]): - Output only. 
Non-fatal problems encountered - during the execution of the build. - failure_info (google.cloud.devtools.cloudbuild_v1.types.Build.FailureInfo): - Output only. Contains information about the - build when status=FAILURE. - """ - class Status(proto.Enum): - r"""Possible status of a build or build step. - - Values: - STATUS_UNKNOWN (0): - Status of the build is unknown. - PENDING (10): - Build has been created and is pending - execution and queuing. It has not been queued. - QUEUED (1): - Build or step is queued; work has not yet - begun. - WORKING (2): - Build or step is being executed. - SUCCESS (3): - Build or step finished successfully. - FAILURE (4): - Build or step failed to complete - successfully. - INTERNAL_ERROR (5): - Build or step failed due to an internal - cause. - TIMEOUT (6): - Build or step took longer than was allowed. - CANCELLED (7): - Build or step was canceled by a user. - EXPIRED (9): - Build was enqueued for longer than the value of - ``queue_ttl``. - """ - STATUS_UNKNOWN = 0 - PENDING = 10 - QUEUED = 1 - WORKING = 2 - SUCCESS = 3 - FAILURE = 4 - INTERNAL_ERROR = 5 - TIMEOUT = 6 - CANCELLED = 7 - EXPIRED = 9 - - class Warning(proto.Message): - r"""A non-fatal problem encountered during the execution of the - build. - - Attributes: - text (str): - Explanation of the warning generated. - priority (google.cloud.devtools.cloudbuild_v1.types.Build.Warning.Priority): - The priority for this warning. - """ - class Priority(proto.Enum): - r"""The relative importance of this warning. - - Values: - PRIORITY_UNSPECIFIED (0): - Should not be used. - INFO (1): - e.g. deprecation warnings and alternative - feature highlights. - WARNING (2): - e.g. automated detection of possible issues - with the build. - ALERT (3): - e.g. 
alerts that a feature used in the build - is pending removal - """ - PRIORITY_UNSPECIFIED = 0 - INFO = 1 - WARNING = 2 - ALERT = 3 - - text: str = proto.Field( - proto.STRING, - number=1, - ) - priority: 'Build.Warning.Priority' = proto.Field( - proto.ENUM, - number=2, - enum='Build.Warning.Priority', - ) - - class FailureInfo(proto.Message): - r"""A fatal problem encountered during the execution of the - build. - - Attributes: - type_ (google.cloud.devtools.cloudbuild_v1.types.Build.FailureInfo.FailureType): - The name of the failure. - detail (str): - Explains the failure issue in more detail - using hard-coded text. - """ - class FailureType(proto.Enum): - r"""The name of a fatal problem encountered during the execution - of the build. - - Values: - FAILURE_TYPE_UNSPECIFIED (0): - Type unspecified - PUSH_FAILED (1): - Unable to push the image to the repository. - PUSH_IMAGE_NOT_FOUND (2): - Final image not found. - PUSH_NOT_AUTHORIZED (3): - Unauthorized push of the final image. - LOGGING_FAILURE (4): - Backend logging failures. Should retry. - USER_BUILD_STEP (5): - A build step has failed. - FETCH_SOURCE_FAILED (6): - The source fetching has failed. 
- """ - FAILURE_TYPE_UNSPECIFIED = 0 - PUSH_FAILED = 1 - PUSH_IMAGE_NOT_FOUND = 2 - PUSH_NOT_AUTHORIZED = 3 - LOGGING_FAILURE = 4 - USER_BUILD_STEP = 5 - FETCH_SOURCE_FAILED = 6 - - type_: 'Build.FailureInfo.FailureType' = proto.Field( - proto.ENUM, - number=1, - enum='Build.FailureInfo.FailureType', - ) - detail: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=45, - ) - id: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=16, - ) - status: Status = proto.Field( - proto.ENUM, - number=2, - enum=Status, - ) - status_detail: str = proto.Field( - proto.STRING, - number=24, - ) - source: 'Source' = proto.Field( - proto.MESSAGE, - number=3, - message='Source', - ) - steps: MutableSequence['BuildStep'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='BuildStep', - ) - results: 'Results' = proto.Field( - proto.MESSAGE, - number=10, - message='Results', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - finish_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=12, - message=duration_pb2.Duration, - ) - images: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=13, - ) - queue_ttl: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=40, - message=duration_pb2.Duration, - ) - artifacts: 'Artifacts' = proto.Field( - proto.MESSAGE, - number=37, - message='Artifacts', - ) - logs_bucket: str = proto.Field( - proto.STRING, - number=19, - ) - source_provenance: 'SourceProvenance' = proto.Field( - proto.MESSAGE, - number=21, - message='SourceProvenance', - ) - build_trigger_id: str = proto.Field( 
- proto.STRING, - number=22, - ) - options: 'BuildOptions' = proto.Field( - proto.MESSAGE, - number=23, - message='BuildOptions', - ) - log_url: str = proto.Field( - proto.STRING, - number=25, - ) - substitutions: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=29, - ) - tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=31, - ) - secrets: MutableSequence['Secret'] = proto.RepeatedField( - proto.MESSAGE, - number=32, - message='Secret', - ) - timing: MutableMapping[str, 'TimeSpan'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=33, - message='TimeSpan', - ) - approval: 'BuildApproval' = proto.Field( - proto.MESSAGE, - number=44, - message='BuildApproval', - ) - service_account: str = proto.Field( - proto.STRING, - number=42, - ) - available_secrets: 'Secrets' = proto.Field( - proto.MESSAGE, - number=47, - message='Secrets', - ) - warnings: MutableSequence[Warning] = proto.RepeatedField( - proto.MESSAGE, - number=49, - message=Warning, - ) - failure_info: FailureInfo = proto.Field( - proto.MESSAGE, - number=51, - message=FailureInfo, - ) - - -class Artifacts(proto.Message): - r"""Artifacts produced by a build that should be uploaded upon - successful completion of all build steps. - - Attributes: - images (MutableSequence[str]): - A list of images to be pushed upon the - successful completion of all build steps. - - The images will be pushed using the builder - service account's credentials. - The digests of the pushed images will be stored - in the Build resource's results field. - - If any of the images fail to be pushed, the - build is marked FAILURE. - objects (google.cloud.devtools.cloudbuild_v1.types.Artifacts.ArtifactObjects): - A list of objects to be uploaded to Cloud - Storage upon successful completion of all build - steps. 
- Files in the workspace matching specified paths - globs will be uploaded to the specified Cloud - Storage location using the builder service - account's credentials. - - The location and generation of the uploaded - objects will be stored in the Build resource's - results field. - - If any objects fail to be pushed, the build is - marked FAILURE. - maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.MavenArtifact]): - A list of Maven artifacts to be uploaded to - Artifact Registry upon successful completion of - all build steps. - Artifacts in the workspace matching specified - paths globs will be uploaded to the specified - Artifact Registry repository using the builder - service account's credentials. - - If any artifacts fail to be pushed, the build is - marked FAILURE. - python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.PythonPackage]): - A list of Python packages to be uploaded to - Artifact Registry upon successful completion of - all build steps. - The build service account credentials will be - used to perform the upload. - If any objects fail to be pushed, the build is - marked FAILURE. - npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.NpmPackage]): - A list of npm packages to be uploaded to - Artifact Registry upon successful completion of - all build steps. - Npm packages in the specified paths will be - uploaded to the specified Artifact Registry - repository using the builder service account's - credentials. - - If any packages fail to be pushed, the build is - marked FAILURE. - """ - - class ArtifactObjects(proto.Message): - r"""Files in the workspace to upload to Cloud Storage upon - successful completion of all build steps. - - Attributes: - location (str): - Cloud Storage bucket and optional object path, in the form - "gs://bucket/path/to/somewhere/". (see `Bucket Name - Requirements `__). 
- - Files in the workspace matching any path pattern will be - uploaded to Cloud Storage with this location as a prefix. - paths (MutableSequence[str]): - Path globs used to match files in the build's - workspace. - timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing all artifact objects. - """ - - location: str = proto.Field( - proto.STRING, - number=1, - ) - paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=3, - message='TimeSpan', - ) - - class MavenArtifact(proto.Message): - r"""A Maven artifact to upload to Artifact Registry upon - successful completion of all build steps. - - Attributes: - repository (str): - Artifact Registry repository, in the form - "https://$REGION-maven.pkg.dev/$PROJECT/$REPOSITORY" - Artifact in the workspace specified by path will - be uploaded to Artifact Registry with this - location as a prefix. - path (str): - Path to an artifact in the build's workspace - to be uploaded to Artifact Registry. - This can be either an absolute path, - e.g. - /workspace/my-app/target/my-app-1.0.SNAPSHOT.jar - or a relative path from /workspace, - e.g. my-app/target/my-app-1.0.SNAPSHOT.jar. - artifact_id (str): - Maven ``artifactId`` value used when uploading the artifact - to Artifact Registry. - group_id (str): - Maven ``groupId`` value used when uploading the artifact to - Artifact Registry. - version (str): - Maven ``version`` value used when uploading the artifact to - Artifact Registry. 
- """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - path: str = proto.Field( - proto.STRING, - number=2, - ) - artifact_id: str = proto.Field( - proto.STRING, - number=3, - ) - group_id: str = proto.Field( - proto.STRING, - number=4, - ) - version: str = proto.Field( - proto.STRING, - number=5, - ) - - class PythonPackage(proto.Message): - r"""Python package to upload to Artifact Registry upon successful - completion of all build steps. A package can encapsulate - multiple objects to be uploaded to a single repository. - - Attributes: - repository (str): - Artifact Registry repository, in the form - "https://$REGION-python.pkg.dev/$PROJECT/$REPOSITORY" - Files in the workspace matching any path pattern - will be uploaded to Artifact Registry with this - location as a prefix. - paths (MutableSequence[str]): - Path globs used to match files in the build's workspace. For - Python/ Twine, this is usually ``dist/*``, and sometimes - additionally an ``.asc`` file. - """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - class NpmPackage(proto.Message): - r"""Npm package to upload to Artifact Registry upon successful - completion of all build steps. - - Attributes: - repository (str): - Artifact Registry repository, in the form - "https://$REGION-npm.pkg.dev/$PROJECT/$REPOSITORY" - Npm package in the workspace specified by path - will be zipped and uploaded to Artifact Registry - with this location as a prefix. - package_path (str): - Path to the package.json. - e.g. 
workspace/path/to/package - """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - package_path: str = proto.Field( - proto.STRING, - number=2, - ) - - images: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - objects: ArtifactObjects = proto.Field( - proto.MESSAGE, - number=2, - message=ArtifactObjects, - ) - maven_artifacts: MutableSequence[MavenArtifact] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=MavenArtifact, - ) - python_packages: MutableSequence[PythonPackage] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=PythonPackage, - ) - npm_packages: MutableSequence[NpmPackage] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=NpmPackage, - ) - - -class TimeSpan(proto.Message): - r"""Start and end times for a build execution phase. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Start of time span. - end_time (google.protobuf.timestamp_pb2.Timestamp): - End of time span. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class BuildOperationMetadata(proto.Message): - r"""Metadata for build operations. - - Attributes: - build (google.cloud.devtools.cloudbuild_v1.types.Build): - The build that the operation is tracking. - """ - - build: 'Build' = proto.Field( - proto.MESSAGE, - number=1, - message='Build', - ) - - -class SourceProvenance(proto.Message): - r"""Provenance of the source. Ways to find the original source, - or verify that some source was used for this build. - - Attributes: - resolved_storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource): - A copy of the build's ``source.storage_source``, if exists, - with any generations resolved. 
- resolved_repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - A copy of the build's ``source.repo_source``, if exists, - with any revisions resolved. - resolved_storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest): - A copy of the build's ``source.storage_source_manifest``, if - exists, with any revisions resolved. This feature is in - Preview. - file_hashes (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.FileHashes]): - Output only. Hash(es) of the build source, which can be used - to verify that the original source integrity was maintained - in the build. Note that ``FileHashes`` will only be - populated if ``BuildOptions`` has requested a - ``SourceProvenanceHash``. - - The keys to this map are file paths used as build source and - the values contain the hash values for those files. - - If the build source came in a single package such as a - gzipped tarfile (``.tar.gz``), the ``FileHash`` will be for - the single path to that file. - """ - - resolved_storage_source: 'StorageSource' = proto.Field( - proto.MESSAGE, - number=3, - message='StorageSource', - ) - resolved_repo_source: 'RepoSource' = proto.Field( - proto.MESSAGE, - number=6, - message='RepoSource', - ) - resolved_storage_source_manifest: 'StorageSourceManifest' = proto.Field( - proto.MESSAGE, - number=9, - message='StorageSourceManifest', - ) - file_hashes: MutableMapping[str, 'FileHashes'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=4, - message='FileHashes', - ) - - -class FileHashes(proto.Message): - r"""Container message for hashes of byte content of files, used - in SourceProvenance messages to verify integrity of source input - to the build. - - Attributes: - file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Hash]): - Collection of file hashes. 
- """ - - file_hash: MutableSequence['Hash'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Hash', - ) - - -class Hash(proto.Message): - r"""Container message for hash values. - - Attributes: - type_ (google.cloud.devtools.cloudbuild_v1.types.Hash.HashType): - The type of hash that was performed. - value (bytes): - The hash value. - """ - class HashType(proto.Enum): - r"""Specifies the hash algorithm, if any. - - Values: - NONE (0): - No hash requested. - SHA256 (1): - Use a sha256 hash. - MD5 (2): - Use a md5 hash. - SHA512 (4): - Use a sha512 hash. - """ - NONE = 0 - SHA256 = 1 - MD5 = 2 - SHA512 = 4 - - type_: HashType = proto.Field( - proto.ENUM, - number=1, - enum=HashType, - ) - value: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class Secrets(proto.Message): - r"""Secrets and secret environment variables. - - Attributes: - secret_manager (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.SecretManagerSecret]): - Secrets in Secret Manager and associated - secret environment variable. - inline (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.InlineSecret]): - Secrets encrypted with KMS key and the - associated secret environment variable. - """ - - secret_manager: MutableSequence['SecretManagerSecret'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SecretManagerSecret', - ) - inline: MutableSequence['InlineSecret'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='InlineSecret', - ) - - -class InlineSecret(proto.Message): - r"""Pairs a set of secret environment variables mapped to - encrypted values with the Cloud KMS key to use to decrypt the - value. - - Attributes: - kms_key_name (str): - Resource name of Cloud KMS crypto key to decrypt the - encrypted value. In format: - projects/\ */locations/*/keyRings/*/cryptoKeys/* - env_map (MutableMapping[str, bytes]): - Map of environment variable name to its - encrypted value. 
- Secret environment variables must be unique - across all of a build's secrets, and must be - used by at least one build step. Values can be - at most 64 KB in size. There can be at most 100 - secret values across all of a build's secrets. - """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=1, - ) - env_map: MutableMapping[str, bytes] = proto.MapField( - proto.STRING, - proto.BYTES, - number=2, - ) - - -class SecretManagerSecret(proto.Message): - r"""Pairs a secret environment variable with a SecretVersion in - Secret Manager. - - Attributes: - version_name (str): - Resource name of the SecretVersion. In format: - projects/\ */secrets/*/versions/\* - env (str): - Environment variable name to associate with - the secret. Secret environment variables must be - unique across all of a build's secrets, and must - be used by at least one build step. - """ - - version_name: str = proto.Field( - proto.STRING, - number=1, - ) - env: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Secret(proto.Message): - r"""Pairs a set of secret environment variables containing encrypted - values with the Cloud KMS key to use to decrypt the value. Note: Use - ``kmsKeyName`` with ``available_secrets`` instead of using - ``kmsKeyName`` with ``secret``. For instructions see: - https://cloud.google.com/cloud-build/docs/securing-builds/use-encrypted-credentials. - - Attributes: - kms_key_name (str): - Cloud KMS key name to use to decrypt these - envs. - secret_env (MutableMapping[str, bytes]): - Map of environment variable name to its - encrypted value. - Secret environment variables must be unique - across all of a build's secrets, and must be - used by at least one build step. Values can be - at most 64 KB in size. There can be at most 100 - secret values across all of a build's secrets. 
- """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=1, - ) - secret_env: MutableMapping[str, bytes] = proto.MapField( - proto.STRING, - proto.BYTES, - number=3, - ) - - -class CreateBuildRequest(proto.Message): - r"""Request to create a new build. - - Attributes: - parent (str): - The parent resource where this build will be created. - Format: ``projects/{project}/locations/{location}`` - project_id (str): - Required. ID of the project. - build (google.cloud.devtools.cloudbuild_v1.types.Build): - Required. Build resource to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - build: 'Build' = proto.Field( - proto.MESSAGE, - number=2, - message='Build', - ) - - -class GetBuildRequest(proto.Message): - r"""Request to get a build. - - Attributes: - name (str): - The name of the ``Build`` to retrieve. Format: - ``projects/{project}/locations/{location}/builds/{build}`` - project_id (str): - Required. ID of the project. - id (str): - Required. ID of the build. - """ - - name: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListBuildsRequest(proto.Message): - r"""Request to list builds. - - Attributes: - parent (str): - The parent of the collection of ``Builds``. Format: - ``projects/{project}/locations/{location}`` - project_id (str): - Required. ID of the project. - page_size (int): - Number of results to return in the list. - page_token (str): - The page token for the next page of Builds. - - If unspecified, the first page of results is returned. - - If the token is rejected for any reason, INVALID_ARGUMENT - will be thrown. In this case, the token should be discarded, - and pagination should be restarted from the first page of - results. - - See https://google.aip.dev/158 for more. 
- filter (str): - The raw filter text to constrain the results. - """ - - parent: str = proto.Field( - proto.STRING, - number=9, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=8, - ) - - -class ListBuildsResponse(proto.Message): - r"""Response including listed builds. - - Attributes: - builds (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Build]): - Builds will be sorted by ``create_time``, descending. - next_page_token (str): - Token to receive the next page of results. - This will be absent if the end of the response - list has been reached. - """ - - @property - def raw_page(self): - return self - - builds: MutableSequence['Build'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Build', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelBuildRequest(proto.Message): - r"""Request to cancel an ongoing build. - - Attributes: - name (str): - The name of the ``Build`` to cancel. Format: - ``projects/{project}/locations/{location}/builds/{build}`` - project_id (str): - Required. ID of the project. - id (str): - Required. ID of the build. - """ - - name: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ApproveBuildRequest(proto.Message): - r"""Request to approve or reject a pending build. - - Attributes: - name (str): - Required. Name of the target build. For example: - "projects/{$project_id}/builds/{$build_id}". - approval_result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): - Approval decision and metadata. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - approval_result: 'ApprovalResult' = proto.Field( - proto.MESSAGE, - number=2, - message='ApprovalResult', - ) - - -class BuildApproval(proto.Message): - r"""BuildApproval describes a build's approval configuration, - state, and result. - - Attributes: - state (google.cloud.devtools.cloudbuild_v1.types.BuildApproval.State): - Output only. The state of this build's - approval. - config (google.cloud.devtools.cloudbuild_v1.types.ApprovalConfig): - Output only. Configuration for manual - approval of this build. - result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): - Output only. Result of manual approval for - this Build. - """ - class State(proto.Enum): - r"""Specifies the current state of a build's approval. - - Values: - STATE_UNSPECIFIED (0): - Default enum type. This should not be used. - PENDING (1): - Build approval is pending. - APPROVED (2): - Build approval has been approved. - REJECTED (3): - Build approval has been rejected. - CANCELLED (5): - Build was cancelled while it was still - pending approval. - """ - STATE_UNSPECIFIED = 0 - PENDING = 1 - APPROVED = 2 - REJECTED = 3 - CANCELLED = 5 - - state: State = proto.Field( - proto.ENUM, - number=1, - enum=State, - ) - config: 'ApprovalConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='ApprovalConfig', - ) - result: 'ApprovalResult' = proto.Field( - proto.MESSAGE, - number=3, - message='ApprovalResult', - ) - - -class ApprovalConfig(proto.Message): - r"""ApprovalConfig describes configuration for manual approval of - a build. - - Attributes: - approval_required (bool): - Whether or not approval is needed. If this is - set on a build, it will become pending when - created, and will need to be explicitly approved - to start. 
- """ - - approval_required: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class ApprovalResult(proto.Message): - r"""ApprovalResult describes the decision and associated metadata - of a manual approval of a build. - - Attributes: - approver_account (str): - Output only. Email of the user that called - the ApproveBuild API to approve or reject a - build at the time that the API was called. - approval_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the approval - decision was made. - decision (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult.Decision): - Required. The decision of this manual - approval. - comment (str): - Optional. An optional comment for this manual - approval result. - url (str): - Optional. An optional URL tied to this manual - approval result. This field is essentially the - same as comment, except that it will be rendered - by the UI differently. An example use case is a - link to an external job that approved this - Build. - """ - class Decision(proto.Enum): - r"""Specifies whether or not this manual approval result is to - approve or reject a build. - - Values: - DECISION_UNSPECIFIED (0): - Default enum type. This should not be used. - APPROVED (1): - Build is approved. - REJECTED (2): - Build is rejected. - """ - DECISION_UNSPECIFIED = 0 - APPROVED = 1 - REJECTED = 2 - - approver_account: str = proto.Field( - proto.STRING, - number=2, - ) - approval_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - decision: Decision = proto.Field( - proto.ENUM, - number=4, - enum=Decision, - ) - comment: str = proto.Field( - proto.STRING, - number=5, - ) - url: str = proto.Field( - proto.STRING, - number=6, - ) - - -class BuildTrigger(proto.Message): - r"""Configuration for an automated build in response to source - repository changes. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - resource_name (str): - The ``Trigger`` name with format: - ``projects/{project}/locations/{location}/triggers/{trigger}``, - where {trigger} is a unique identifier generated by the - service. - id (str): - Output only. Unique identifier of the - trigger. - description (str): - Human-readable description of this trigger. - name (str): - User-assigned name of the trigger. Must be - unique within the project. Trigger names must - meet the following requirements: - + They must contain only alphanumeric characters - and dashes. + They can be 1-64 characters long. - + They must begin and end with an alphanumeric - character. - tags (MutableSequence[str]): - Tags for annotation of a ``BuildTrigger`` - trigger_template (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - Template describing the types of source changes to trigger a - build. - - Branch and tag names in trigger templates are interpreted as - regular expressions. Any branch or tag change that matches - that regular expression will trigger a build. - - Mutually exclusive with ``github``. - github (google.cloud.devtools.cloudbuild_v1.types.GitHubEventsConfig): - GitHubEventsConfig describes the configuration of a trigger - that creates a build whenever a GitHub event is received. - - Mutually exclusive with ``trigger_template``. - pubsub_config (google.cloud.devtools.cloudbuild_v1.types.PubsubConfig): - PubsubConfig describes the configuration of a - trigger that creates a build whenever a Pub/Sub - message is published. - webhook_config (google.cloud.devtools.cloudbuild_v1.types.WebhookConfig): - WebhookConfig describes the configuration of - a trigger that creates a build whenever a - webhook is sent to a trigger's webhook URL. 
- autodetect (bool): - Autodetect build configuration. The - following precedence is used (case insensitive): - 1. cloudbuild.yaml - 2. cloudbuild.yml - 3. cloudbuild.json - 4. Dockerfile - - Currently only available for GitHub App - Triggers. - - This field is a member of `oneof`_ ``build_template``. - build (google.cloud.devtools.cloudbuild_v1.types.Build): - Contents of the build template. - - This field is a member of `oneof`_ ``build_template``. - filename (str): - Path, from the source root, to the build - configuration file (i.e. cloudbuild.yaml). - - This field is a member of `oneof`_ ``build_template``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the trigger was - created. - disabled (bool): - If true, the trigger will never automatically - execute a build. - substitutions (MutableMapping[str, str]): - Substitutions for Build resource. The keys must match the - following regular expression: ``^_[A-Z0-9_]+$``. - ignored_files (MutableSequence[str]): - ignored_files and included_files are file glob matches using - https://golang.org/pkg/path/filepath/#Match extended with - support for "**". - - If ignored_files and changed files are both empty, then they - are not used to determine whether or not to trigger a build. - - If ignored_files is not empty, then we ignore any files that - match any of the ignored_file globs. If the change has no - files that are outside of the ignored_files globs, then we - do not trigger a build. - included_files (MutableSequence[str]): - If any of the files altered in the commit pass the - ignored_files filter and included_files is empty, then as - far as this filter is concerned, we should trigger the - build. - - If any of the files altered in the commit pass the - ignored_files filter and included_files is not empty, then - we make sure that at least one of those files matches a - included_files glob. If not, then we do not trigger a build. - filter (str): - Optional. 
A Common Expression Language - string. - service_account (str): - The service account used for all user-controlled operations - including UpdateBuildTrigger, RunBuildTrigger, CreateBuild, - and CancelBuild. If no service account is set, then the - standard Cloud Build service account - ([PROJECT_NUM]@system.gserviceaccount.com) will be used - instead. Format: - ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}`` - repository_event_config (google.cloud.devtools.cloudbuild_v1.types.RepositoryEventConfig): - The configuration of a trigger that creates a - build whenever an event from Repo API is - received. - """ - - resource_name: str = proto.Field( - proto.STRING, - number=34, - ) - id: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=10, - ) - name: str = proto.Field( - proto.STRING, - number=21, - ) - tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=19, - ) - trigger_template: 'RepoSource' = proto.Field( - proto.MESSAGE, - number=7, - message='RepoSource', - ) - github: 'GitHubEventsConfig' = proto.Field( - proto.MESSAGE, - number=13, - message='GitHubEventsConfig', - ) - pubsub_config: 'PubsubConfig' = proto.Field( - proto.MESSAGE, - number=29, - message='PubsubConfig', - ) - webhook_config: 'WebhookConfig' = proto.Field( - proto.MESSAGE, - number=31, - message='WebhookConfig', - ) - autodetect: bool = proto.Field( - proto.BOOL, - number=18, - oneof='build_template', - ) - build: 'Build' = proto.Field( - proto.MESSAGE, - number=4, - oneof='build_template', - message='Build', - ) - filename: str = proto.Field( - proto.STRING, - number=8, - oneof='build_template', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - disabled: bool = proto.Field( - proto.BOOL, - number=9, - ) - substitutions: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=11, - ) - 
ignored_files: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=15, - ) - included_files: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=16, - ) - filter: str = proto.Field( - proto.STRING, - number=30, - ) - service_account: str = proto.Field( - proto.STRING, - number=33, - ) - repository_event_config: 'RepositoryEventConfig' = proto.Field( - proto.MESSAGE, - number=39, - message='RepositoryEventConfig', - ) - - -class RepositoryEventConfig(proto.Message): - r"""The configuration of a trigger that creates a build whenever - an event from Repo API is received. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - repository (str): - The resource name of the Repo API resource. - repository_type (google.cloud.devtools.cloudbuild_v1.types.RepositoryEventConfig.RepositoryType): - Output only. The type of the SCM vendor the - repository points to. - pull_request (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter): - Filter to match changes in pull requests. - - This field is a member of `oneof`_ ``filter``. - push (google.cloud.devtools.cloudbuild_v1.types.PushFilter): - Filter to match changes in refs like - branches, tags. - - This field is a member of `oneof`_ ``filter``. - """ - class RepositoryType(proto.Enum): - r"""All possible SCM repo types from Repo API. - - Values: - REPOSITORY_TYPE_UNSPECIFIED (0): - If unspecified, RepositoryType defaults to - GITHUB. - GITHUB (1): - The SCM repo is GITHUB. - GITHUB_ENTERPRISE (2): - The SCM repo is GITHUB Enterprise. - GITLAB_ENTERPRISE (3): - The SCM repo is GITLAB Enterprise. 
- """ - REPOSITORY_TYPE_UNSPECIFIED = 0 - GITHUB = 1 - GITHUB_ENTERPRISE = 2 - GITLAB_ENTERPRISE = 3 - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - repository_type: RepositoryType = proto.Field( - proto.ENUM, - number=2, - enum=RepositoryType, - ) - pull_request: 'PullRequestFilter' = proto.Field( - proto.MESSAGE, - number=3, - oneof='filter', - message='PullRequestFilter', - ) - push: 'PushFilter' = proto.Field( - proto.MESSAGE, - number=4, - oneof='filter', - message='PushFilter', - ) - - -class GitHubEventsConfig(proto.Message): - r"""GitHubEventsConfig describes the configuration of a trigger - that creates a build whenever a GitHub event is received. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - installation_id (int): - The installationID that emits the GitHub - event. - owner (str): - Owner of the repository. For example: The - owner for - https://github.com/googlecloudplatform/cloud-builders - is "googlecloudplatform". - name (str): - Name of the repository. For example: The name - for - https://github.com/googlecloudplatform/cloud-builders - is "cloud-builders". - pull_request (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter): - filter to match changes in pull requests. - - This field is a member of `oneof`_ ``event``. - push (google.cloud.devtools.cloudbuild_v1.types.PushFilter): - filter to match changes in refs like - branches, tags. - - This field is a member of `oneof`_ ``event``. 
- """ - - installation_id: int = proto.Field( - proto.INT64, - number=1, - ) - owner: str = proto.Field( - proto.STRING, - number=6, - ) - name: str = proto.Field( - proto.STRING, - number=7, - ) - pull_request: 'PullRequestFilter' = proto.Field( - proto.MESSAGE, - number=4, - oneof='event', - message='PullRequestFilter', - ) - push: 'PushFilter' = proto.Field( - proto.MESSAGE, - number=5, - oneof='event', - message='PushFilter', - ) - - -class PubsubConfig(proto.Message): - r"""PubsubConfig describes the configuration of a trigger that - creates a build whenever a Pub/Sub message is published. - - Attributes: - subscription (str): - Output only. Name of the subscription. Format is - ``projects/{project}/subscriptions/{subscription}``. - topic (str): - The name of the topic from which this subscription is - receiving messages. Format is - ``projects/{project}/topics/{topic}``. - service_account_email (str): - Service account that will make the push - request. - state (google.cloud.devtools.cloudbuild_v1.types.PubsubConfig.State): - Potential issues with the underlying Pub/Sub - subscription configuration. Only populated on - get requests. - """ - class State(proto.Enum): - r"""Enumerates potential issues with the underlying Pub/Sub - subscription configuration. - - Values: - STATE_UNSPECIFIED (0): - The subscription configuration has not been - checked. - OK (1): - The Pub/Sub subscription is properly - configured. - SUBSCRIPTION_DELETED (2): - The subscription has been deleted. - TOPIC_DELETED (3): - The topic has been deleted. - SUBSCRIPTION_MISCONFIGURED (4): - Some of the subscription's field are - misconfigured. 
- """ - STATE_UNSPECIFIED = 0 - OK = 1 - SUBSCRIPTION_DELETED = 2 - TOPIC_DELETED = 3 - SUBSCRIPTION_MISCONFIGURED = 4 - - subscription: str = proto.Field( - proto.STRING, - number=1, - ) - topic: str = proto.Field( - proto.STRING, - number=2, - ) - service_account_email: str = proto.Field( - proto.STRING, - number=3, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - - -class WebhookConfig(proto.Message): - r"""WebhookConfig describes the configuration of a trigger that - creates a build whenever a webhook is sent to a trigger's - webhook URL. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - secret (str): - Required. Resource name for the secret - required as a URL parameter. - - This field is a member of `oneof`_ ``auth_method``. - state (google.cloud.devtools.cloudbuild_v1.types.WebhookConfig.State): - Potential issues with the underlying Pub/Sub - subscription configuration. Only populated on - get requests. - """ - class State(proto.Enum): - r"""Enumerates potential issues with the Secret Manager secret - provided by the user. - - Values: - STATE_UNSPECIFIED (0): - The webhook auth configuration not been - checked. - OK (1): - The auth configuration is properly setup. - SECRET_DELETED (2): - The secret provided in auth_method has been deleted. - """ - STATE_UNSPECIFIED = 0 - OK = 1 - SECRET_DELETED = 2 - - secret: str = proto.Field( - proto.STRING, - number=3, - oneof='auth_method', - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - - -class PullRequestFilter(proto.Message): - r"""PullRequestFilter contains filter properties for matching - GitHub Pull Requests. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - branch (str): - Regex of branches to match. 
- The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``git_ref``. - comment_control (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter.CommentControl): - Configure builds to run whether a repository owner or - collaborator need to comment ``/gcbrun``. - invert_regex (bool): - If true, branches that do NOT match the git_ref will trigger - a build. - """ - class CommentControl(proto.Enum): - r"""Controls behavior of Pull Request comments. - - Values: - COMMENTS_DISABLED (0): - Do not require comments on Pull Requests - before builds are triggered. - COMMENTS_ENABLED (1): - Enforce that repository owners or - collaborators must comment on Pull Requests - before builds are triggered. - COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY (2): - Enforce that repository owners or - collaborators must comment on external - contributors' Pull Requests before builds are - triggered. - """ - COMMENTS_DISABLED = 0 - COMMENTS_ENABLED = 1 - COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY = 2 - - branch: str = proto.Field( - proto.STRING, - number=2, - oneof='git_ref', - ) - comment_control: CommentControl = proto.Field( - proto.ENUM, - number=5, - enum=CommentControl, - ) - invert_regex: bool = proto.Field( - proto.BOOL, - number=6, - ) - - -class PushFilter(proto.Message): - r"""Push contains filter properties for matching GitHub git - pushes. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - branch (str): - Regexes matching branches to build. 
- The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``git_ref``. - tag (str): - Regexes matching tags to build. - The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``git_ref``. - invert_regex (bool): - When true, only trigger a build if the revision regex does - NOT match the git_ref regex. - """ - - branch: str = proto.Field( - proto.STRING, - number=2, - oneof='git_ref', - ) - tag: str = proto.Field( - proto.STRING, - number=3, - oneof='git_ref', - ) - invert_regex: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class CreateBuildTriggerRequest(proto.Message): - r"""Request to create a new ``BuildTrigger``. - - Attributes: - parent (str): - The parent resource where this trigger will be created. - Format: ``projects/{project}/locations/{location}`` - project_id (str): - Required. ID of the project for which to - configure automatic builds. - trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): - Required. ``BuildTrigger`` to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger: 'BuildTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='BuildTrigger', - ) - - -class GetBuildTriggerRequest(proto.Message): - r"""Returns the ``BuildTrigger`` with the specified ID. - - Attributes: - name (str): - The name of the ``Trigger`` to retrieve. Format: - ``projects/{project}/locations/{location}/triggers/{trigger}`` - project_id (str): - Required. ID of the project that owns the - trigger. - trigger_id (str): - Required. Identifier (``id`` or ``name``) of the - ``BuildTrigger`` to get. 
- """ - - name: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListBuildTriggersRequest(proto.Message): - r"""Request to list existing ``BuildTriggers``. - - Attributes: - parent (str): - The parent of the collection of ``Triggers``. Format: - ``projects/{project}/locations/{location}`` - project_id (str): - Required. ID of the project for which to list - BuildTriggers. - page_size (int): - Number of results to return in the list. - page_token (str): - Token to provide to skip to a particular spot - in the list. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListBuildTriggersResponse(proto.Message): - r"""Response containing existing ``BuildTriggers``. - - Attributes: - triggers (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildTrigger]): - ``BuildTriggers`` for the project, sorted by ``create_time`` - descending. - next_page_token (str): - Token to receive the next page of results. - """ - - @property - def raw_page(self): - return self - - triggers: MutableSequence['BuildTrigger'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BuildTrigger', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteBuildTriggerRequest(proto.Message): - r"""Request to delete a ``BuildTrigger``. - - Attributes: - name (str): - The name of the ``Trigger`` to delete. Format: - ``projects/{project}/locations/{location}/triggers/{trigger}`` - project_id (str): - Required. ID of the project that owns the - trigger. - trigger_id (str): - Required. ID of the ``BuildTrigger`` to delete. 
- """ - - name: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateBuildTriggerRequest(proto.Message): - r"""Request to update an existing ``BuildTrigger``. - - Attributes: - project_id (str): - Required. ID of the project that owns the - trigger. - trigger_id (str): - Required. ID of the ``BuildTrigger`` to update. - trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): - Required. ``BuildTrigger`` to update. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger_id: str = proto.Field( - proto.STRING, - number=2, - ) - trigger: 'BuildTrigger' = proto.Field( - proto.MESSAGE, - number=3, - message='BuildTrigger', - ) - - -class BuildOptions(proto.Message): - r"""Optional arguments to enable specific features of builds. - - Attributes: - source_provenance_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Hash.HashType]): - Requested hash for SourceProvenance. - requested_verify_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.VerifyOption): - Requested verifiability options. - machine_type (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.MachineType): - Compute Engine machine type on which to run - the build. - disk_size_gb (int): - Requested disk size for the VM that runs the build. Note - that this is *NOT* "disk free"; some of the space will be - used by the operating system and build utilities. Also note - that this is the minimum disk size that will be allocated - for the build -- the build may run with a larger disk than - requested. At present, the maximum disk size is 2000GB; - builds that request more than the maximum are rejected with - an error. - substitution_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.SubstitutionOption): - Option to specify behavior when there is an error in the - substitution checks. 
- - NOTE: this is always set to ALLOW_LOOSE for triggered builds - and cannot be overridden in the build configuration file. - dynamic_substitutions (bool): - Option to specify whether or not to apply - bash style string operations to the - substitutions. - NOTE: this is always enabled for triggered - builds and cannot be overridden in the build - configuration file. - log_streaming_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LogStreamingOption): - Option to define build log streaming behavior - to Cloud Storage. - worker_pool (str): - This field deprecated; please use ``pool.name`` instead. - pool (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.PoolOption): - Optional. Specification for execution on a ``WorkerPool``. - - See `running builds in a private - pool `__ - for more information. - logging (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LoggingMode): - Option to specify the logging mode, which - determines if and where build logs are stored. - env (MutableSequence[str]): - A list of global environment variable - definitions that will exist for all build steps - in this build. If a variable is defined in both - globally and in a build step, the variable will - use the build step value. - The elements are of the form "KEY=VALUE" for the - environment variable "KEY" being given the value - "VALUE". - secret_env (MutableSequence[str]): - A list of global environment variables, which are encrypted - using a Cloud Key Management Service crypto key. These - values must be specified in the build's ``Secret``. These - variables will be available to all build steps in this - build. - volumes (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Volume]): - Global list of volumes to mount for ALL build - steps - Each volume is created as an empty volume prior - to starting the build process. Upon completion - of the build, volumes and their contents are - discarded. 
Global volume names and paths cannot - conflict with the volumes defined a build step. - - Using a global volume in a build with only one - step is not valid as it is indicative of a build - request with an incorrect configuration. - default_logs_bucket_behavior (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.DefaultLogsBucketBehavior): - Optional. Option to specify how default logs - buckets are setup. - """ - class VerifyOption(proto.Enum): - r"""Specifies the manner in which the build should be verified, if at - all. - - If a verified build is requested, and any part of the process to - generate and upload provenance fails, the build will also fail. - - If the build does not request verification then that process may - occur, but is not guaranteed to. If it does occur and fails, the - build will not fail. - - For more information, see `Viewing Build - Provenance `__. - - Values: - NOT_VERIFIED (0): - Not a verifiable build (the default). - VERIFIED (1): - Build must be verified. - """ - NOT_VERIFIED = 0 - VERIFIED = 1 - - class MachineType(proto.Enum): - r"""Supported Compute Engine machine types. For more information, see - `Machine - types `__. - - Values: - UNSPECIFIED (0): - Standard machine type. - N1_HIGHCPU_8 (1): - Highcpu machine with 8 CPUs. - N1_HIGHCPU_32 (2): - Highcpu machine with 32 CPUs. - E2_HIGHCPU_8 (5): - Highcpu e2 machine with 8 CPUs. - E2_HIGHCPU_32 (6): - Highcpu e2 machine with 32 CPUs. - E2_MEDIUM (7): - E2 machine with 1 CPU. - """ - UNSPECIFIED = 0 - N1_HIGHCPU_8 = 1 - N1_HIGHCPU_32 = 2 - E2_HIGHCPU_8 = 5 - E2_HIGHCPU_32 = 6 - E2_MEDIUM = 7 - - class SubstitutionOption(proto.Enum): - r"""Specifies the behavior when there is an error in the - substitution checks. - - Values: - MUST_MATCH (0): - Fails the build if error in substitutions - checks, like missing a substitution in the - template or in the map. - ALLOW_LOOSE (1): - Do not fail the build if error in - substitutions checks. 
- """ - MUST_MATCH = 0 - ALLOW_LOOSE = 1 - - class LogStreamingOption(proto.Enum): - r"""Specifies the behavior when writing build logs to Cloud - Storage. - - Values: - STREAM_DEFAULT (0): - Service may automatically determine build log - streaming behavior. - STREAM_ON (1): - Build logs should be streamed to Cloud - Storage. - STREAM_OFF (2): - Build logs should not be streamed to Cloud - Storage; they will be written when the build is - completed. - """ - STREAM_DEFAULT = 0 - STREAM_ON = 1 - STREAM_OFF = 2 - - class LoggingMode(proto.Enum): - r"""Specifies the logging mode. - - Values: - LOGGING_UNSPECIFIED (0): - The service determines the logging mode. The default is - ``LEGACY``. Do not rely on the default logging behavior as - it may change in the future. - LEGACY (1): - Build logs are stored in Cloud Logging and - Cloud Storage. - GCS_ONLY (2): - Build logs are stored in Cloud Storage. - STACKDRIVER_ONLY (3): - This option is the same as CLOUD_LOGGING_ONLY. - CLOUD_LOGGING_ONLY (5): - Build logs are stored in Cloud Logging. Selecting this - option will not allow `logs - streaming `__. - NONE (4): - Turn off all logging. No build logs will be - captured. - """ - LOGGING_UNSPECIFIED = 0 - LEGACY = 1 - GCS_ONLY = 2 - STACKDRIVER_ONLY = 3 - CLOUD_LOGGING_ONLY = 5 - NONE = 4 - - class DefaultLogsBucketBehavior(proto.Enum): - r"""Default GCS log bucket behavior options. - - Values: - DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED (0): - Unspecified. - REGIONAL_USER_OWNED_BUCKET (1): - Bucket is located in user-owned project in - the same region as the build. The builder - service account must have access to create and - write to GCS buckets in the build project. - """ - DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED = 0 - REGIONAL_USER_OWNED_BUCKET = 1 - - class PoolOption(proto.Message): - r"""Details about how a build should be executed on a ``WorkerPool``. - - See `running builds in a private - pool `__ - for more information. 
- - Attributes: - name (str): - The ``WorkerPool`` resource to execute the build on. You - must have ``cloudbuild.workerpools.use`` on the project - hosting the WorkerPool. - - Format - projects/{project}/locations/{location}/workerPools/{workerPoolId} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - source_provenance_hash: MutableSequence['Hash.HashType'] = proto.RepeatedField( - proto.ENUM, - number=1, - enum='Hash.HashType', - ) - requested_verify_option: VerifyOption = proto.Field( - proto.ENUM, - number=2, - enum=VerifyOption, - ) - machine_type: MachineType = proto.Field( - proto.ENUM, - number=3, - enum=MachineType, - ) - disk_size_gb: int = proto.Field( - proto.INT64, - number=6, - ) - substitution_option: SubstitutionOption = proto.Field( - proto.ENUM, - number=4, - enum=SubstitutionOption, - ) - dynamic_substitutions: bool = proto.Field( - proto.BOOL, - number=17, - ) - log_streaming_option: LogStreamingOption = proto.Field( - proto.ENUM, - number=5, - enum=LogStreamingOption, - ) - worker_pool: str = proto.Field( - proto.STRING, - number=7, - ) - pool: PoolOption = proto.Field( - proto.MESSAGE, - number=19, - message=PoolOption, - ) - logging: LoggingMode = proto.Field( - proto.ENUM, - number=11, - enum=LoggingMode, - ) - env: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=12, - ) - secret_env: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=13, - ) - volumes: MutableSequence['Volume'] = proto.RepeatedField( - proto.MESSAGE, - number=14, - message='Volume', - ) - default_logs_bucket_behavior: DefaultLogsBucketBehavior = proto.Field( - proto.ENUM, - number=21, - enum=DefaultLogsBucketBehavior, - ) - - -class ReceiveTriggerWebhookRequest(proto.Message): - r"""ReceiveTriggerWebhookRequest [Experimental] is the request object - accepted by the ReceiveTriggerWebhook method. - - Attributes: - name (str): - The name of the ``ReceiveTriggerWebhook`` to retrieve. 
- Format: - ``projects/{project}/locations/{location}/triggers/{trigger}`` - body (google.api.httpbody_pb2.HttpBody): - HTTP request body. - project_id (str): - Project in which the specified trigger lives - trigger (str): - Name of the trigger to run the payload - against - secret (str): - Secret token used for authorization if an - OAuth token isn't provided. - """ - - name: str = proto.Field( - proto.STRING, - number=5, - ) - body: httpbody_pb2.HttpBody = proto.Field( - proto.MESSAGE, - number=1, - message=httpbody_pb2.HttpBody, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - trigger: str = proto.Field( - proto.STRING, - number=3, - ) - secret: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ReceiveTriggerWebhookResponse(proto.Message): - r"""ReceiveTriggerWebhookResponse [Experimental] is the response object - for the ReceiveTriggerWebhook method. - - """ - - -class WorkerPool(proto.Message): - r"""Configuration for a ``WorkerPool``. - - Cloud Build owns and maintains a pool of workers for general use and - have no access to a project's private network. By default, builds - submitted to Cloud Build will use a worker from this pool. - - If your build needs access to resources on a private network, create - and use a ``WorkerPool`` to run your builds. Private - ``WorkerPool``\ s give your builds access to any single VPC network - that you administer, including any on-prem resources connected to - that VPC network. For an overview of private pools, see `Private - pools - overview `__. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The resource name of the ``WorkerPool``, with - format - ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. 
- The value of ``{worker_pool}`` is provided by - ``worker_pool_id`` in ``CreateWorkerPool`` request and the - value of ``{location}`` is determined by the endpoint - accessed. - display_name (str): - A user-specified, human-readable name for the - ``WorkerPool``. If provided, this value must be 1-63 - characters. - uid (str): - Output only. A unique identifier for the ``WorkerPool``. - annotations (MutableMapping[str, str]): - User specified annotations. See - https://google.aip.dev/128#annotations - for more details such as format and size - limitations. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which the request to create the - ``WorkerPool`` was received. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which the request to update the - ``WorkerPool`` was received. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which the request to delete the - ``WorkerPool`` was received. - state (google.cloud.devtools.cloudbuild_v1.types.WorkerPool.State): - Output only. ``WorkerPool`` state. - private_pool_v1_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config): - Legacy Private Pool configuration. - - This field is a member of `oneof`_ ``config``. - etag (str): - Output only. Checksum computed by the server. - May be sent on update and delete requests to - ensure that the client has an up-to-date value - before proceeding. - """ - class State(proto.Enum): - r"""State of the ``WorkerPool``. - - Values: - STATE_UNSPECIFIED (0): - State of the ``WorkerPool`` is unknown. - CREATING (1): - ``WorkerPool`` is being created. - RUNNING (2): - ``WorkerPool`` is running. - DELETING (3): - ``WorkerPool`` is being deleted: cancelling builds and - draining workers. - DELETED (4): - ``WorkerPool`` is deleted. - UPDATING (5): - ``WorkerPool`` is being updated; new builds cannot be run. 
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - RUNNING = 2 - DELETING = 3 - DELETED = 4 - UPDATING = 5 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=8, - enum=State, - ) - private_pool_v1_config: 'PrivatePoolV1Config' = proto.Field( - proto.MESSAGE, - number=12, - oneof='config', - message='PrivatePoolV1Config', - ) - etag: str = proto.Field( - proto.STRING, - number=11, - ) - - -class PrivatePoolV1Config(proto.Message): - r"""Configuration for a V1 ``PrivatePool``. - - Attributes: - worker_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.WorkerConfig): - Machine configuration for the workers in the - pool. - network_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig): - Network configuration for the pool. - """ - - class WorkerConfig(proto.Message): - r"""Defines the configuration to be used for creating workers in - the pool. - - Attributes: - machine_type (str): - Machine type of a worker, such as ``e2-medium``. See `Worker - pool config - file `__. - If left blank, Cloud Build will use a sensible default. - disk_size_gb (int): - Size of the disk attached to the worker, in GB. See `Worker - pool config - file `__. - Specify a value of up to 2000. If ``0`` is specified, Cloud - Build will use a standard disk size. 
- """ - - machine_type: str = proto.Field( - proto.STRING, - number=1, - ) - disk_size_gb: int = proto.Field( - proto.INT64, - number=2, - ) - - class NetworkConfig(proto.Message): - r"""Defines the network configuration for the pool. - - Attributes: - peered_network (str): - Required. Immutable. The network definition that the workers - are peered to. If this section is left empty, the workers - will be peered to ``WorkerPool.project_id`` on the service - producer network. Must be in the format - ``projects/{project}/global/networks/{network}``, where - ``{project}`` is a project number, such as ``12345``, and - ``{network}`` is the name of a VPC network in the project. - See `Understanding network configuration - options `__ - egress_option (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig.EgressOption): - Option to configure network egress for the - workers. - peered_network_ip_range (str): - Immutable. Subnet IP range within the peered network. This - is specified in CIDR notation with a slash and the subnet - prefix size. You can optionally specify an IP address before - the subnet prefix value. e.g. ``192.168.0.0/29`` would - specify an IP range starting at 192.168.0.0 with a prefix - size of 29 bits. ``/16`` would specify a prefix size of 16 - bits, with an automatically determined IP within the peered - VPC. If unspecified, a value of ``/24`` will be used. - """ - class EgressOption(proto.Enum): - r"""Defines the egress option for the pool. - - Values: - EGRESS_OPTION_UNSPECIFIED (0): - If set, defaults to PUBLIC_EGRESS. - NO_PUBLIC_EGRESS (1): - If set, workers are created without any - public address, which prevents network egress to - public IPs unless a network proxy is configured. - PUBLIC_EGRESS (2): - If set, workers are created with a public - address which allows for public internet egress. 
- """ - EGRESS_OPTION_UNSPECIFIED = 0 - NO_PUBLIC_EGRESS = 1 - PUBLIC_EGRESS = 2 - - peered_network: str = proto.Field( - proto.STRING, - number=1, - ) - egress_option: 'PrivatePoolV1Config.NetworkConfig.EgressOption' = proto.Field( - proto.ENUM, - number=2, - enum='PrivatePoolV1Config.NetworkConfig.EgressOption', - ) - peered_network_ip_range: str = proto.Field( - proto.STRING, - number=3, - ) - - worker_config: WorkerConfig = proto.Field( - proto.MESSAGE, - number=1, - message=WorkerConfig, - ) - network_config: NetworkConfig = proto.Field( - proto.MESSAGE, - number=2, - message=NetworkConfig, - ) - - -class CreateWorkerPoolRequest(proto.Message): - r"""Request to create a new ``WorkerPool``. - - Attributes: - parent (str): - Required. The parent resource where this worker pool will be - created. Format: - ``projects/{project}/locations/{location}``. - worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): - Required. ``WorkerPool`` resource to create. - worker_pool_id (str): - Required. Immutable. The ID to use for the ``WorkerPool``, - which will become the final component of the resource name. - - This value should be 1-63 characters, and valid characters - are /[a-z][0-9]-/. - validate_only (bool): - If set, validate the request and preview the - response, but do not actually post it. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - worker_pool: 'WorkerPool' = proto.Field( - proto.MESSAGE, - number=2, - message='WorkerPool', - ) - worker_pool_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class GetWorkerPoolRequest(proto.Message): - r"""Request to get a ``WorkerPool`` with the specified name. - - Attributes: - name (str): - Required. The name of the ``WorkerPool`` to retrieve. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteWorkerPoolRequest(proto.Message): - r"""Request to delete a ``WorkerPool``. - - Attributes: - name (str): - Required. The name of the ``WorkerPool`` to delete. Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - etag (str): - Optional. If provided, it must match the - server's etag on the workerpool for the request - to be processed. - allow_missing (bool): - If set to true, and the ``WorkerPool`` is not found, the - request will succeed but no action will be taken on the - server. - validate_only (bool): - If set, validate the request and preview the - response, but do not actually post it. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateWorkerPoolRequest(proto.Message): - r"""Request to update a ``WorkerPool``. - - Attributes: - worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): - Required. The ``WorkerPool`` to update. - - The ``name`` field is used to identify the ``WorkerPool`` to - update. Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask specifying which fields in ``worker_pool`` to update. - validate_only (bool): - If set, validate the request and preview the - response, but do not actually post it. - """ - - worker_pool: 'WorkerPool' = proto.Field( - proto.MESSAGE, - number=1, - message='WorkerPool', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class ListWorkerPoolsRequest(proto.Message): - r"""Request to list ``WorkerPool``\ s. 
- - Attributes: - parent (str): - Required. The parent of the collection of ``WorkerPools``. - Format: ``projects/{project}/locations/{location}``. - page_size (int): - The maximum number of ``WorkerPool``\ s to return. The - service may return fewer than this value. If omitted, the - server will use a sensible default. - page_token (str): - A page token, received from a previous ``ListWorkerPools`` - call. Provide this to retrieve the subsequent page. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListWorkerPoolsResponse(proto.Message): - r"""Response containing existing ``WorkerPools``. - - Attributes: - worker_pools (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.WorkerPool]): - ``WorkerPools`` for the specified project. - next_page_token (str): - Continuation token used to page through large - result sets. Provide this value in a subsequent - ListWorkerPoolsRequest to return the next page - of results. - """ - - @property - def raw_page(self): - return self - - worker_pools: MutableSequence['WorkerPool'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='WorkerPool', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateWorkerPoolOperationMetadata(proto.Message): - r"""Metadata for the ``CreateWorkerPool`` operation. - - Attributes: - worker_pool (str): - The resource name of the ``WorkerPool`` to create. Format: - ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was created. - complete_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was completed. 
- """ - - worker_pool: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - complete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateWorkerPoolOperationMetadata(proto.Message): - r"""Metadata for the ``UpdateWorkerPool`` operation. - - Attributes: - worker_pool (str): - The resource name of the ``WorkerPool`` being updated. - Format: - ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was created. - complete_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was completed. - """ - - worker_pool: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - complete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class DeleteWorkerPoolOperationMetadata(proto.Message): - r"""Metadata for the ``DeleteWorkerPool`` operation. - - Attributes: - worker_pool (str): - The resource name of the ``WorkerPool`` being deleted. - Format: - ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was created. - complete_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was completed. 
- """ - - worker_pool: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - complete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index e09b880c..00000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/devtools/cloudbuild_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py deleted file mode 100644 index a8280c5e..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApproveBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ApproveBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_approve_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ApproveBuildRequest( - name="name_value", - ) - - # Make the request - operation = client.approve_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ApproveBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py deleted file mode 100644 index e90be4b0..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApproveBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_approve_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ApproveBuildRequest( - name="name_value", - ) - - # Make the request - operation = client.approve_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py deleted file mode 100644 index 73320372..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CancelBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_cancel_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CancelBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = await client.cancel_build(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CancelBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py deleted file mode 100644 index 656b5d59..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CancelBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_cancel_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CancelBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = client.cancel_build(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CancelBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py deleted file mode 100644 index 07750a37..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_create_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateBuildRequest( - project_id="project_id_value", - ) - - # Make the request - operation = client.create_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py deleted file mode 100644 index 173aea57..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_create_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateBuildRequest( - project_id="project_id_value", - ) - - # Make the request - operation = client.create_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py deleted file mode 100644 index 9fe3fcdf..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_create_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.CreateBuildTriggerRequest( - project_id="project_id_value", - trigger=trigger, - ) - - # Make the request - response = await client.create_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py deleted file mode 100644 index 8ddcd2f3..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_create_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.CreateBuildTriggerRequest( - project_id="project_id_value", - trigger=trigger, - ) - - # Make the request - response = client.create_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py deleted file mode 100644 index cd0a773b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_create_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateWorkerPoolRequest( - parent="parent_value", - worker_pool_id="worker_pool_id_value", - ) - - # Make the request - operation = client.create_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py deleted file mode 100644 index 80396e7e..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_create_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateWorkerPoolRequest( - parent="parent_value", - worker_pool_id="worker_pool_id_value", - ) - - # Make the request - operation = client.create_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py deleted file mode 100644 index 62955bcc..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_delete_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - await client.delete_build_trigger(request=request) - - -# [END cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py deleted file mode 100644 index 249ba150..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_delete_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - client.delete_build_trigger(request=request) - - -# [END cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py deleted file mode 100644 index 257fa9ba..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_delete_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteWorkerPoolRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py deleted file mode 100644 index a2b9f632..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 
2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_delete_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteWorkerPoolRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py deleted file mode 100644 index 585bce61..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_get_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = await client.get_build(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py deleted file mode 100644 index d767fe6c..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_get_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = client.get_build(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py deleted file mode 100644 index 373b419b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_get_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - response = await client.get_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py deleted file mode 100644 index f2dd1102..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_get_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - response = client.get_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py deleted file mode 100644 index 1ad3016f..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_get_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetWorkerPoolRequest( - name="name_value", - ) - - # Make the request - response = await client.get_worker_pool(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py deleted file mode 100644 index fd50d2fd..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_get_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetWorkerPoolRequest( - name="name_value", - ) - - # Make the request - response = client.get_worker_pool(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py deleted file mode 100644 index 43b21efa..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBuildTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_list_build_triggers(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildTriggersRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_build_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py deleted file mode 100644 index 86f6e1c1..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may 
not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBuildTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_list_build_triggers(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildTriggersRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_build_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py deleted file mode 100644 index 30ad36a2..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBuilds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListBuilds_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_list_builds(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildsRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_builds(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListBuilds_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py deleted file mode 100644 index 9c2813c3..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBuilds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListBuilds_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_list_builds(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildsRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_builds(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListBuilds_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py deleted file mode 100644 index 378636ad..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListWorkerPools -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_list_worker_pools(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListWorkerPoolsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_worker_pools(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py deleted file mode 100644 index d23cdb76..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListWorkerPools -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_list_worker_pools(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListWorkerPoolsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_worker_pools(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py deleted file mode 100644 index 133c477b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReceiveTriggerWebhook -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_receive_trigger_webhook(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ReceiveTriggerWebhookRequest( - ) - - # Make the request - response = await client.receive_trigger_webhook(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py deleted file mode 100644 index 839f241c..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReceiveTriggerWebhook -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_receive_trigger_webhook(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ReceiveTriggerWebhookRequest( - ) - - # Make the request - response = client.receive_trigger_webhook(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py deleted file mode 100644 index 8c671273..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RetryBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_RetryBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_retry_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RetryBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - operation = client.retry_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_RetryBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py deleted file mode 100644 index 6b1d79f0..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you 
may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RetryBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_RetryBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_retry_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RetryBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - operation = client.retry_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_RetryBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py deleted file mode 100644 index 1c33cfb2..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_run_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RunBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - operation = client.run_build_trigger(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py deleted file mode 100644 index 78b1a643..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_run_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RunBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - operation = client.run_build_trigger(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py deleted file mode 100644 index 46d6ea7f..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_update_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.UpdateBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=trigger, - ) - - # Make the request - response = await client.update_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py deleted file mode 100644 index 4022a4e4..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_update_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.UpdateBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=trigger, - ) - - # Make the request - response = client.update_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py deleted file mode 100644 index 4152140a..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_update_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.UpdateWorkerPoolRequest( - ) - - # Make the request - operation = client.update_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py deleted file mode 100644 index b7bab1b1..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_update_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.UpdateWorkerPoolRequest( - ) - - # Make the request - operation = client.update_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json deleted file mode 100644 index e379efab..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ /dev/null @@ -1,3027 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.devtools.cloudbuild.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-build", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.approve_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ApproveBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ApproveBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": 
"approval_result", - "type": "google.cloud.devtools.cloudbuild_v1.types.ApprovalResult" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "approve_build" - }, - "description": "Sample for ApproveBuild", - "file": "cloudbuild_v1_generated_cloud_build_approve_build_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ApproveBuild_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_approve_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.approve_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ApproveBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ApproveBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "approval_result", - "type": "google.cloud.devtools.cloudbuild_v1.types.ApprovalResult" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", 
- "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "approve_build" - }, - "description": "Sample for ApproveBuild", - "file": "cloudbuild_v1_generated_cloud_build_approve_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_approve_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.cancel_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CancelBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", - "shortName": "cancel_build" - }, - "description": "Sample for CancelBuild", - "file": "cloudbuild_v1_generated_cloud_build_cancel_build_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CancelBuild_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_cancel_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.cancel_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CancelBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", - "shortName": "cancel_build" - }, - "description": "Sample for CancelBuild", - "file": "cloudbuild_v1_generated_cloud_build_cancel_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CancelBuild_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - 
}, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_cancel_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger", - "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "create_build_trigger" - }, - "description": "Sample for CreateBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger", - "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "create_build_trigger" - }, - "description": "Sample for CreateBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py" - }, - { - "canonical": true, - 
"clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "build", - "type": "google.cloud.devtools.cloudbuild_v1.types.Build" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_build" - }, - "description": "Sample for CreateBuild", - "file": "cloudbuild_v1_generated_cloud_build_create_build_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuild_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_build", 
- "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "build", - "type": "google.cloud.devtools.cloudbuild_v1.types.Build" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_build" - }, - "description": "Sample for CreateBuild", - "file": "cloudbuild_v1_generated_cloud_build_create_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuild_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateWorkerPool" - 
}, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "worker_pool", - "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" - }, - { - "name": "worker_pool_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_worker_pool" - }, - "description": "Sample for CreateWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "worker_pool", - "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" - }, - { - "name": "worker_pool_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_worker_pool" - }, - "description": "Sample for CreateWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.delete_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "DeleteBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_build_trigger" - }, - "description": "Sample for DeleteBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.delete_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "DeleteBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_build_trigger" - }, - "description": "Sample for DeleteBuildTrigger", - "file": 
"cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.delete_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "DeleteWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_worker_pool" - }, - "description": "Sample for DeleteWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.delete_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "DeleteWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_worker_pool" - }, - "description": "Sample for DeleteWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "get_build_trigger" - }, - "description": "Sample for GetBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - 
"shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "get_build_trigger" - }, - "description": "Sample for GetBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuild", - "service": { - 
"fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", - "shortName": "get_build" - }, - "description": "Sample for GetBuild", - "file": "cloudbuild_v1_generated_cloud_build_get_build_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuild_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - 
{ - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", - "shortName": "get_build" - }, - "description": "Sample for GetBuild", - "file": "cloudbuild_v1_generated_cloud_build_get_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuild_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool", - "shortName": "get_worker_pool" - }, - "description": "Sample for 
GetWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool", - "shortName": "get_worker_pool" - }, - "description": "Sample for GetWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - 
"type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_build_triggers", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListBuildTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersAsyncPager", - "shortName": "list_build_triggers" - }, - "description": "Sample for ListBuildTriggers", - "file": "cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_build_triggers", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListBuildTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersPager", - "shortName": "list_build_triggers" - }, - "description": "Sample for ListBuildTriggers", - "file": "cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - 
"client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_builds", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListBuilds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsAsyncPager", - "shortName": "list_builds" - }, - "description": "Sample for ListBuilds", - "file": "cloudbuild_v1_generated_cloud_build_list_builds_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuilds_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_builds_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_builds", - "method": { - "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListBuilds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsPager", - "shortName": "list_builds" - }, - "description": "Sample for ListBuilds", - "file": "cloudbuild_v1_generated_cloud_build_list_builds_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuilds_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_builds_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_worker_pools", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListWorkerPools" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsAsyncPager", - "shortName": "list_worker_pools" - }, - "description": "Sample for ListWorkerPools", - "file": "cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_worker_pools", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListWorkerPools" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsPager", - "shortName": "list_worker_pools" - }, - "description": "Sample for ListWorkerPools", - "file": "cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.receive_trigger_webhook", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ReceiveTriggerWebhook", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ReceiveTriggerWebhook" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse", - "shortName": "receive_trigger_webhook" - }, - "description": "Sample for ReceiveTriggerWebhook", - 
"file": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.receive_trigger_webhook", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ReceiveTriggerWebhook", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ReceiveTriggerWebhook" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse", - "shortName": "receive_trigger_webhook" - }, - "description": "Sample for ReceiveTriggerWebhook", - "file": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": 
"FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.retry_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "RetryBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "retry_build" - }, - "description": "Sample for RetryBuild", - "file": "cloudbuild_v1_generated_cloud_build_retry_build_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_RetryBuild_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_retry_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.retry_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "RetryBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "retry_build" - }, - "description": "Sample for RetryBuild", - "file": "cloudbuild_v1_generated_cloud_build_retry_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_RetryBuild_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_retry_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - 
"shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.run_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "RunBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "source", - "type": "google.cloud.devtools.cloudbuild_v1.types.RepoSource" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "run_build_trigger" - }, - "description": "Sample for RunBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.run_build_trigger", - "method": { - 
"fullName": "google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "RunBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "source", - "type": "google.cloud.devtools.cloudbuild_v1.types.RepoSource" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "run_build_trigger" - }, - "description": "Sample for RunBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.update_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", - "service": { - "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "UpdateBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "trigger", - "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "update_build_trigger" - }, - "description": "Sample for UpdateBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.update_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "UpdateBuildTrigger" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "trigger", - "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "update_build_trigger" - }, - "description": "Sample for UpdateBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.update_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "UpdateWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest" - }, - { - "name": "worker_pool", - "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_worker_pool" - }, - "description": "Sample for UpdateWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.update_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "UpdateWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest" - }, - { - "name": "worker_pool", - "type": 
"google.cloud.devtools.cloudbuild_v1.types.WorkerPool" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_worker_pool" - }, - "description": "Sample for UpdateWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py deleted file mode 100644 index 78aabad9..00000000 --- a/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py +++ /dev/null @@ -1,193 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class cloudbuildCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'approve_build': ('name', 'approval_result', ), - 'cancel_build': ('project_id', 'id', 'name', ), - 'create_build': ('project_id', 'build', 'parent', ), - 'create_build_trigger': ('project_id', 'trigger', 'parent', ), - 'create_worker_pool': ('parent', 'worker_pool', 'worker_pool_id', 'validate_only', ), - 'delete_build_trigger': ('project_id', 'trigger_id', 'name', ), - 'delete_worker_pool': ('name', 'etag', 'allow_missing', 'validate_only', ), - 'get_build': ('project_id', 'id', 'name', ), - 'get_build_trigger': ('project_id', 'trigger_id', 'name', ), - 'get_worker_pool': ('name', ), - 'list_builds': ('project_id', 'parent', 'page_size', 'page_token', 'filter', ), - 'list_build_triggers': ('project_id', 'parent', 'page_size', 'page_token', ), - 'list_worker_pools': ('parent', 'page_size', 'page_token', ), - 'receive_trigger_webhook': ('name', 'body', 'project_id', 'trigger', 'secret', ), - 'retry_build': ('project_id', 'id', 'name', ), - 'run_build_trigger': ('project_id', 'trigger_id', 'name', 'source', ), - 'update_build_trigger': ('project_id', 'trigger_id', 'trigger', ), - 'update_worker_pool': ('worker_pool', 'update_mask', 'validate_only', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = 
original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=cloudbuildCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the cloudbuild client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py deleted file mode 100644 index 49cab02d..00000000 --- a/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-build' - - -description = "Google Cloud Build API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/devtools/cloudbuild/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-build" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud", "google.cloud.devtools"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - 
"Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py deleted file mode 100644 index e76401f0..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ /dev/null @@ -1,10282 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api import httpbody_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildAsyncClient -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildClient -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from 
google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CloudBuildClient._get_default_mtls_endpoint(None) is None - assert CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (CloudBuildClient, "grpc"), - (CloudBuildAsyncClient, "grpc_asyncio"), - (CloudBuildClient, "rest"), -]) -def test_cloud_build_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 
'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudbuild.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.CloudBuildGrpcTransport, "grpc"), - (transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.CloudBuildRestTransport, "rest"), -]) -def test_cloud_build_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (CloudBuildClient, "grpc"), - (CloudBuildAsyncClient, "grpc_asyncio"), - (CloudBuildClient, "rest"), -]) -def test_cloud_build_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudbuild.googleapis.com' - ) - - -def 
test_cloud_build_client_get_transport_class(): - transport = CloudBuildClient.get_transport_class() - available_transports = [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildRestTransport, - ] - assert transport in available_transports - - transport = CloudBuildClient.get_transport_class("grpc") - assert transport == transports.CloudBuildGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), -]) -@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) -@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) -def test_cloud_build_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "true"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "false"), - 
(CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "true"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "false"), -]) -@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) -@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_build_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - CloudBuildClient, CloudBuildAsyncClient -]) -@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) -@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) -def test_cloud_build_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), -]) -def test_cloud_build_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest", None), -]) -def test_cloud_build_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_cloud_build_client_client_options_from_dict(): - with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = CloudBuildClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_cloud_build_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateBuildRequest, - dict, -]) -def test_create_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - client.create_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildRequest() - -@pytest.mark.asyncio -async def test_create_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_build_async_from_dict(): - await test_create_build_async(request_type=dict) - -def test_create_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_create_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_build( - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].build - mock_val = cloudbuild.Build(name='name_value') - assert arg == mock_val - - -def test_create_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_build( - cloudbuild.CreateBuildRequest(), - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_build( - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].build - mock_val = cloudbuild.Build(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_build( - cloudbuild.CreateBuildRequest(), - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetBuildRequest, - dict, -]) -def test_get_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - ) - response = client.get_build(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -def test_get_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - client.get_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildRequest() - -@pytest.mark.asyncio -async def test_get_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - )) - response = await client.get_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_get_build_async_from_dict(): - await test_get_build_async(request_type=dict) - -def test_get_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - call.return_value = cloudbuild.Build() - client.get_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_get_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.Build() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - - -def test_get_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_build( - cloudbuild.GetBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - -@pytest.mark.asyncio -async def test_get_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.Build() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_build( - cloudbuild.GetBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListBuildsRequest, - dict, -]) -def test_list_builds(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloudbuild.ListBuildsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_builds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_builds_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - client.list_builds() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildsRequest() - -@pytest.mark.asyncio -async def test_list_builds_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListBuildsRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_builds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_builds_async_from_dict(): - await test_list_builds_async(request_type=dict) - -def test_list_builds_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildsRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - call.return_value = cloudbuild.ListBuildsResponse() - client.list_builds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_list_builds_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_builds( - project_id='project_id_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - - -def test_list_builds_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_builds( - cloudbuild.ListBuildsRequest(), - project_id='project_id_value', - filter='filter_value', - ) - -@pytest.mark.asyncio -async def test_list_builds_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_builds( - project_id='project_id_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_builds_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_builds( - cloudbuild.ListBuildsRequest(), - project_id='project_id_value', - filter='filter_value', - ) - - -def test_list_builds_pager(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_builds(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.Build) - for i in results) -def test_list_builds_pages(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - RuntimeError, - ) - pages = list(client.list_builds(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_builds_async_pager(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_builds(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.Build) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_builds_async_pages(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_builds(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CancelBuildRequest, - dict, -]) -def test_cancel_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - ) - response = client.cancel_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CancelBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -def test_cancel_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - client.cancel_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CancelBuildRequest() - -@pytest.mark.asyncio -async def test_cancel_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CancelBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - )) - response = await client.cancel_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CancelBuildRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_cancel_build_async_from_dict(): - await test_cancel_build_async(request_type=dict) - -def test_cancel_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CancelBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - call.return_value = cloudbuild.Build() - client.cancel_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_cancel_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloudbuild.Build() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.cancel_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - - -def test_cancel_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_build( - cloudbuild.CancelBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - -@pytest.mark.asyncio -async def test_cancel_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.Build() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.cancel_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_cancel_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.cancel_build( - cloudbuild.CancelBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.RetryBuildRequest, - dict, -]) -def test_retry_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.retry_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RetryBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_retry_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - client.retry_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RetryBuildRequest() - -@pytest.mark.asyncio -async def test_retry_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.RetryBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.retry_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RetryBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_retry_build_async_from_dict(): - await test_retry_build_async(request_type=dict) - -def test_retry_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudbuild.RetryBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.retry_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_retry_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.retry_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - - -def test_retry_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.retry_build( - cloudbuild.RetryBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - -@pytest.mark.asyncio -async def test_retry_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.retry_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_retry_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.retry_build( - cloudbuild.RetryBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ApproveBuildRequest, - dict, -]) -def test_approve_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.approve_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ApproveBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_approve_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - client.approve_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ApproveBuildRequest() - -@pytest.mark.asyncio -async def test_approve_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ApproveBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.approve_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ApproveBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_approve_build_async_from_dict(): - await test_approve_build_async(request_type=dict) - -def test_approve_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ApproveBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.approve_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_approve_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.approve_build( - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].approval_result - mock_val = cloudbuild.ApprovalResult(approver_account='approver_account_value') - assert arg == mock_val - - -def test_approve_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.approve_build( - cloudbuild.ApproveBuildRequest(), - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - -@pytest.mark.asyncio -async def test_approve_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.approve_build( - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].approval_result - mock_val = cloudbuild.ApprovalResult(approver_account='approver_account_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_approve_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.approve_build( - cloudbuild.ApproveBuildRequest(), - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateBuildTriggerRequest, - dict, -]) -def test_create_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - response = client.create_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_create_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - client.create_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_create_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - )) - response = await client.create_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_create_build_trigger_async_from_dict(): - await test_create_build_trigger_async(request_type=dict) - -def test_create_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildTriggerRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - call.return_value = cloudbuild.BuildTrigger() - client.create_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_create_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_build_trigger( - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger - mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') - assert arg == mock_val - - -def test_create_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_build_trigger( - cloudbuild.CreateBuildTriggerRequest(), - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - -@pytest.mark.asyncio -async def test_create_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_build_trigger( - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger - mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_build_trigger( - cloudbuild.CreateBuildTriggerRequest(), - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetBuildTriggerRequest, - dict, -]) -def test_get_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - response = client.get_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_get_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - client.get_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_get_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - )) - response = await client.get_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_get_build_trigger_async_from_dict(): - await test_get_build_trigger_async(request_type=dict) - -def test_get_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - call.return_value = cloudbuild.BuildTrigger() - client.get_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_get_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - - -def test_get_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_build_trigger( - cloudbuild.GetBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - -@pytest.mark.asyncio -async def test_get_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_build_trigger( - cloudbuild.GetBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListBuildTriggersRequest, - dict, -]) -def test_list_build_triggers(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildTriggersResponse( - next_page_token='next_page_token_value', - ) - response = client.list_build_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_build_triggers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - client.list_build_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildTriggersRequest() - -@pytest.mark.asyncio -async def test_list_build_triggers_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListBuildTriggersRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_build_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_build_triggers_async_from_dict(): - await test_list_build_triggers_async(request_type=dict) - -def test_list_build_triggers_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudbuild.ListBuildTriggersRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - call.return_value = cloudbuild.ListBuildTriggersResponse() - client.list_build_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_list_build_triggers_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildTriggersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_build_triggers( - project_id='project_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - - -def test_list_build_triggers_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), - project_id='project_id_value', - ) - -@pytest.mark.asyncio -async def test_list_build_triggers_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildTriggersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_build_triggers( - project_id='project_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_build_triggers_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), - project_id='project_id_value', - ) - - -def test_list_build_triggers_pager(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_build_triggers(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) - for i in results) -def test_list_build_triggers_pages(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - RuntimeError, - ) - pages = list(client.list_build_triggers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_build_triggers_async_pager(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_build_triggers(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_build_triggers_async_pages(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_build_triggers(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - cloudbuild.DeleteBuildTriggerRequest, - dict, -]) -def test_delete_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - client.delete_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_delete_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.DeleteBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_build_trigger_async_from_dict(): - await test_delete_build_trigger_async(request_type=dict) - -def test_delete_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - call.return_value = None - client.delete_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_delete_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - - -def test_delete_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_build_trigger( - cloudbuild.DeleteBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - -@pytest.mark.asyncio -async def test_delete_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_build_trigger( - cloudbuild.DeleteBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.UpdateBuildTriggerRequest, - dict, -]) -def test_update_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - response = client.update_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_update_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - client.update_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_update_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.UpdateBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - )) - response = await client.update_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_update_build_trigger_async_from_dict(): - await test_update_build_trigger_async(request_type=dict) - -def test_update_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateBuildTriggerRequest(**{"trigger": {"resource_name": "projects/sample1/locations/sample2/triggers/sample3"}}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - call.return_value = cloudbuild.BuildTrigger() - client.update_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_update_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - arg = args[0].trigger - mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') - assert arg == mock_val - - -def test_update_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_build_trigger( - cloudbuild.UpdateBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - -@pytest.mark.asyncio -async def test_update_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - arg = args[0].trigger - mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_build_trigger( - cloudbuild.UpdateBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.RunBuildTriggerRequest, - dict, -]) -def test_run_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.run_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RunBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_run_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - client.run_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RunBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_run_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.RunBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.run_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RunBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_run_build_trigger_async_from_dict(): - await test_run_build_trigger_async(request_type=dict) - -def test_run_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.RunBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.run_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_run_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.run_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - arg = args[0].source - mock_val = cloudbuild.RepoSource(project_id='project_id_value') - assert arg == mock_val - - -def test_run_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.run_build_trigger( - cloudbuild.RunBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - -@pytest.mark.asyncio -async def test_run_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.run_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - arg = args[0].source - mock_val = cloudbuild.RepoSource(project_id='project_id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_run_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.run_build_trigger( - cloudbuild.RunBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ReceiveTriggerWebhookRequest, - dict, -]) -def test_receive_trigger_webhook(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ReceiveTriggerWebhookResponse( - ) - response = client.receive_trigger_webhook(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) - - -def test_receive_trigger_webhook_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - client.receive_trigger_webhook() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() - -@pytest.mark.asyncio -async def test_receive_trigger_webhook_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ReceiveTriggerWebhookRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ReceiveTriggerWebhookResponse( - )) - response = await client.receive_trigger_webhook(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) - - -@pytest.mark.asyncio -async def test_receive_trigger_webhook_async_from_dict(): - await test_receive_trigger_webhook_async(request_type=dict) - - -def test_receive_trigger_webhook_field_headers(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudbuild.ReceiveTriggerWebhookRequest() - - request.project_id = 'project_id_value' - request.trigger = 'trigger_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - call.return_value = cloudbuild.ReceiveTriggerWebhookResponse() - client.receive_trigger_webhook(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&trigger=trigger_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_receive_trigger_webhook_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ReceiveTriggerWebhookRequest() - - request.project_id = 'project_id_value' - request.trigger = 'trigger_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ReceiveTriggerWebhookResponse()) - await client.receive_trigger_webhook(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&trigger=trigger_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateWorkerPoolRequest, - dict, -]) -def test_create_worker_pool(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_worker_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - client.create_worker_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateWorkerPoolRequest() - -@pytest.mark.asyncio -async def test_create_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateWorkerPoolRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_worker_pool_async_from_dict(): - await test_create_worker_pool_async(request_type=dict) - -def test_create_worker_pool_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateWorkerPoolRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_create_worker_pool_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_worker_pool( - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].worker_pool - mock_val = cloudbuild.WorkerPool(name='name_value') - assert arg == mock_val - arg = args[0].worker_pool_id - mock_val = 'worker_pool_id_value' - assert arg == mock_val - - -def test_create_worker_pool_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_worker_pool( - cloudbuild.CreateWorkerPoolRequest(), - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - -@pytest.mark.asyncio -async def test_create_worker_pool_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_worker_pool( - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].worker_pool - mock_val = cloudbuild.WorkerPool(name='name_value') - assert arg == mock_val - arg = args[0].worker_pool_id - mock_val = 'worker_pool_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_worker_pool_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_worker_pool( - cloudbuild.CreateWorkerPoolRequest(), - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetWorkerPoolRequest, - dict, -]) -def test_get_worker_pool(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.WorkerPool( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=cloudbuild.WorkerPool.State.CREATING, - etag='etag_value', - ) - response = client.get_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.state == cloudbuild.WorkerPool.State.CREATING - assert response.etag == 'etag_value' - - -def test_get_worker_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - client.get_worker_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetWorkerPoolRequest() - -@pytest.mark.asyncio -async def test_get_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetWorkerPoolRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=cloudbuild.WorkerPool.State.CREATING, - etag='etag_value', - )) - response = await client.get_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetWorkerPoolRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.state == cloudbuild.WorkerPool.State.CREATING - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_worker_pool_async_from_dict(): - await test_get_worker_pool_async(request_type=dict) - -def test_get_worker_pool_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetWorkerPoolRequest(**{"name": "projects/sample1/locations/sample2/workerPools/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - call.return_value = cloudbuild.WorkerPool() - client.get_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_get_worker_pool_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.WorkerPool() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_worker_pool( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_worker_pool_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_worker_pool( - cloudbuild.GetWorkerPoolRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_worker_pool_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.WorkerPool() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_worker_pool( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_worker_pool_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_worker_pool( - cloudbuild.GetWorkerPoolRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.DeleteWorkerPoolRequest, - dict, -]) -def test_delete_worker_pool(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_worker_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - client.delete_worker_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteWorkerPoolRequest() - -@pytest.mark.asyncio -async def test_delete_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.DeleteWorkerPoolRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_worker_pool_async_from_dict(): - await test_delete_worker_pool_async(request_type=dict) - -def test_delete_worker_pool_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteWorkerPoolRequest(**{"name": "projects/sample1/locations/sample2/workerPools/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_delete_worker_pool_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_worker_pool( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_worker_pool_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_worker_pool( - cloudbuild.DeleteWorkerPoolRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_worker_pool_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_worker_pool( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_worker_pool_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_worker_pool( - cloudbuild.DeleteWorkerPoolRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.UpdateWorkerPoolRequest, - dict, -]) -def test_update_worker_pool(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_worker_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - client.update_worker_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateWorkerPoolRequest() - -@pytest.mark.asyncio -async def test_update_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.UpdateWorkerPoolRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_worker_pool_async_from_dict(): - await test_update_worker_pool_async(request_type=dict) - -def test_update_worker_pool_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateWorkerPoolRequest(**{"worker_pool": {"name": "projects/sample1/locations/sample2/workerPools/sample3"}}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_update_worker_pool_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_worker_pool( - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].worker_pool - mock_val = cloudbuild.WorkerPool(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_worker_pool_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_worker_pool( - cloudbuild.UpdateWorkerPoolRequest(), - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_worker_pool_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_worker_pool( - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].worker_pool - mock_val = cloudbuild.WorkerPool(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_worker_pool_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_worker_pool( - cloudbuild.UpdateWorkerPoolRequest(), - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListWorkerPoolsRequest, - dict, -]) -def test_list_worker_pools(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListWorkerPoolsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_worker_pools(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListWorkerPoolsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListWorkerPoolsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_worker_pools_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - client.list_worker_pools() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListWorkerPoolsRequest() - -@pytest.mark.asyncio -async def test_list_worker_pools_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListWorkerPoolsRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_worker_pools(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListWorkerPoolsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListWorkerPoolsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_worker_pools_async_from_dict(): - await test_list_worker_pools_async(request_type=dict) - -def test_list_worker_pools_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListWorkerPoolsRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - call.return_value = cloudbuild.ListWorkerPoolsResponse() - client.list_worker_pools(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_list_worker_pools_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListWorkerPoolsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_worker_pools( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_worker_pools_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_worker_pools( - cloudbuild.ListWorkerPoolsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_worker_pools_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListWorkerPoolsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_worker_pools( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_worker_pools_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_worker_pools( - cloudbuild.ListWorkerPoolsRequest(), - parent='parent_value', - ) - - -def test_list_worker_pools_pager(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_worker_pools(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.WorkerPool) - for i in results) -def test_list_worker_pools_pages(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - RuntimeError, - ) - pages = list(client.list_worker_pools(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_worker_pools_async_pager(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_worker_pools(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.WorkerPool) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_worker_pools_async_pages(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_worker_pools(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateBuildRequest, - dict, -]) -def test_create_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request_init["build"] = {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 
1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': 
{}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_build(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_build_rest_required_fields(request_type=cloudbuild.CreateBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("parent", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(("parent", )) & set(("projectId", "build", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
cloudbuild.CreateBuildRequest.pb(cloudbuild.CreateBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.CreateBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request_init["build"] = {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': 
['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 
'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_build(request) - - -def test_create_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds" % client.transport._host, args[1]) - - -def test_create_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_build( - cloudbuild.CreateBuildRequest(), - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - - -def test_create_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetBuildRequest, - dict, -]) -def test_get_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_build(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -def test_get_build_rest_required_fields(request_type=cloudbuild.GetBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - 
use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["id"] = 'id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("name", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "id" in jsonified_request - assert jsonified_request["id"] == 'id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.Build() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name", )) & set(("projectId", "id", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.GetBuildRequest.pb(cloudbuild.GetBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = 
PreparedRequest() - req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) - - request = cloudbuild.GetBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.Build() - - client.get_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_build(request) - - -def test_get_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.Build() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - id='id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}" % client.transport._host, args[1]) - - -def test_get_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_build( - cloudbuild.GetBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -def test_get_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListBuildsRequest, - dict, -]) -def test_list_builds_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_builds(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_builds_rest_required_fields(request_type=cloudbuild.ListBuildsRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_builds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_builds._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", "parent", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_builds(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_builds_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_builds._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", "parent", )) & set(("projectId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_builds_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_builds") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_builds") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ListBuildsRequest.pb(cloudbuild.ListBuildsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.ListBuildsResponse.to_json(cloudbuild.ListBuildsResponse()) - - request = cloudbuild.ListBuildsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.ListBuildsResponse() - - client.list_builds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_builds_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListBuildsRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_builds(request) - - -def test_list_builds_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.ListBuildsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - filter='filter_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_builds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds" % client.transport._host, args[1]) - - -def test_list_builds_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_builds( - cloudbuild.ListBuildsRequest(), - project_id='project_id_value', - filter='filter_value', - ) - - -def test_list_builds_rest_pager(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloudbuild.ListBuildsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1'} - - pager = client.list_builds(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.Build) - for i in results) - - pages = list(client.list_builds(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CancelBuildRequest, - dict, -]) -def test_cancel_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.cancel_build(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -def test_cancel_build_rest_required_fields(request_type=cloudbuild.CancelBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["id"] = 'id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "id" in jsonified_request - assert jsonified_request["id"] == 'id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.Build() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_cancel_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.cancel_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "id", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_cancel_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_cancel_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.CancelBuildRequest.pb(cloudbuild.CancelBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) - - request = cloudbuild.CancelBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.Build() - - client.cancel_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_cancel_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CancelBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_build(request) - - -def test_cancel_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.Build() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - id='id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.cancel_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}:cancel" % client.transport._host, args[1]) - - -def test_cancel_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_build( - cloudbuild.CancelBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -def test_cancel_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.RetryBuildRequest, - dict, -]) -def test_retry_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.retry_build(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_retry_build_rest_required_fields(request_type=cloudbuild.RetryBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).retry_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["id"] = 'id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).retry_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "id" in jsonified_request - assert jsonified_request["id"] == 'id_value' - - client = CloudBuildClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.retry_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_retry_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.retry_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "id", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_retry_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - 
client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_retry_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_retry_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.RetryBuildRequest.pb(cloudbuild.RetryBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.RetryBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.retry_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_retry_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.RetryBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.retry_build(request) - - -def test_retry_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - id='id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.retry_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}:retry" % client.transport._host, args[1]) - - -def test_retry_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.retry_build( - cloudbuild.RetryBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -def test_retry_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ApproveBuildRequest, - dict, -]) -def test_approve_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/builds/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.approve_build(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_approve_build_rest_required_fields(request_type=cloudbuild.ApproveBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).approve_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).approve_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.approve_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_approve_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.approve_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_approve_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_approve_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_approve_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ApproveBuildRequest.pb(cloudbuild.ApproveBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.ApproveBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.approve_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_approve_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ApproveBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/builds/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.approve_build(request) - - -def test_approve_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/builds/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.approve_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/builds/*}:approve" % client.transport._host, args[1]) - - -def test_approve_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.approve_build( - cloudbuild.ApproveBuildRequest(), - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - - -def test_approve_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateBuildTriggerRequest, - dict, -]) -def test_create_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': 
{'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 
'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_build_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_create_build_trigger_rest_required_fields(request_type=cloudbuild.CreateBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped 
- - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("parent", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("parent", )) & set(("projectId", "trigger", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_build_trigger") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_build_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.CreateBuildTriggerRequest.pb(cloudbuild.CreateBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) - - request = cloudbuild.CreateBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.BuildTrigger() - - client.create_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 
'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 
'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} - request 
= request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_build_trigger(request) - - -def test_create_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1]) - - -def test_create_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_build_trigger( - cloudbuild.CreateBuildTriggerRequest(), - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - -def test_create_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetBuildTriggerRequest, - dict, -]) -def test_get_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_build_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_get_build_trigger_rest_required_fields(request_type=cloudbuild.GetBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["trigger_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["triggerId"] = 'trigger_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("name", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_build_trigger") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_build_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.GetBuildTriggerRequest.pb(cloudbuild.GetBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) - - request = cloudbuild.GetBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.BuildTrigger() - - client.get_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_build_trigger(request) - - -def test_get_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.BuildTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) - - -def test_get_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_build_trigger( - cloudbuild.GetBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - -def test_get_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListBuildTriggersRequest, - dict, -]) -def test_list_build_triggers_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildTriggersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_build_triggers(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBuildTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_build_triggers_rest_required_fields(request_type=cloudbuild.ListBuildTriggersRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_build_triggers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_build_triggers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "parent", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildTriggersResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_build_triggers(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_build_triggers_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_build_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", "parent", )) & set(("projectId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_build_triggers_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_build_triggers") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_build_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ListBuildTriggersRequest.pb(cloudbuild.ListBuildTriggersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.ListBuildTriggersResponse.to_json(cloudbuild.ListBuildTriggersResponse()) - - request = cloudbuild.ListBuildTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.ListBuildTriggersResponse() - - client.list_build_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_build_triggers_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListBuildTriggersRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_build_triggers(request) - - -def test_list_build_triggers_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildTriggersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_build_triggers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1]) - - -def test_list_build_triggers_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), - project_id='project_id_value', - ) - - -def test_list_build_triggers_rest_pager(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloudbuild.ListBuildTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1'} - - pager = client.list_build_triggers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) - for i in results) - - pages = list(client.list_build_triggers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token 
- - -@pytest.mark.parametrize("request_type", [ - cloudbuild.DeleteBuildTriggerRequest, - dict, -]) -def test_delete_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_build_trigger(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_build_trigger_rest_required_fields(request_type=cloudbuild.DeleteBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["trigger_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["triggerId"] = 'trigger_id_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_build_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("name", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_delete_build_trigger") as pre: - pre.assert_not_called() - pb_message = cloudbuild.DeleteBuildTriggerRequest.pb(cloudbuild.DeleteBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = cloudbuild.DeleteBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", 
"squid"), - ] - pre.return_value = request, metadata - - client.delete_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.DeleteBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_build_trigger(request) - - -def test_delete_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) - - -def test_delete_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_build_trigger( - cloudbuild.DeleteBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - -def test_delete_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.UpdateBuildTriggerRequest, - dict, -]) -def test_update_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': 
{'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 
'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_build_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_update_build_trigger_rest_required_fields(request_type=cloudbuild.UpdateBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["trigger_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify 
fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["triggerId"] = 'trigger_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "triggerId", "trigger", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_update_build_trigger") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_update_build_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.UpdateBuildTriggerRequest.pb(cloudbuild.UpdateBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) - - request = cloudbuild.UpdateBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.BuildTrigger() - - client.update_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.UpdateBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 
'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 
'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 
'pull_request': {}, 'push': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_build_trigger(request) - - -def test_update_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) - - -def test_update_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_build_trigger( - cloudbuild.UpdateBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - -def test_update_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.RunBuildTriggerRequest, - dict, -]) -def test_run_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request_init["source"] = {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.run_build_trigger(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_run_build_trigger_rest_required_fields(request_type=cloudbuild.RunBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["trigger_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["triggerId"] = 'trigger_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_build_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("name", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.run_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_run_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.run_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_run_build_trigger") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_run_build_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.RunBuildTriggerRequest.pb(cloudbuild.RunBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.RunBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.run_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_run_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.RunBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request_init["source"] = {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_build_trigger(request) - - -def test_run_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.run_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}:run" % client.transport._host, args[1]) - - -def test_run_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.run_build_trigger( - cloudbuild.RunBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - - -def test_run_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ReceiveTriggerWebhookRequest, - dict, -]) -def test_receive_trigger_webhook_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger': 'sample2'} - request_init["body"] = {'content_type': 'content_type_value', 'data': b'data_blob', 'extensions': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ReceiveTriggerWebhookResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ReceiveTriggerWebhookResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.receive_trigger_webhook(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_receive_trigger_webhook_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_receive_trigger_webhook") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_receive_trigger_webhook") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ReceiveTriggerWebhookRequest.pb(cloudbuild.ReceiveTriggerWebhookRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.ReceiveTriggerWebhookResponse.to_json(cloudbuild.ReceiveTriggerWebhookResponse()) - - request = cloudbuild.ReceiveTriggerWebhookRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.ReceiveTriggerWebhookResponse() - - client.receive_trigger_webhook(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_receive_trigger_webhook_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ReceiveTriggerWebhookRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = 
{'project_id': 'sample1', 'trigger': 'sample2'} - request_init["body"] = {'content_type': 'content_type_value', 'data': b'data_blob', 'extensions': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.receive_trigger_webhook(request) - - -def test_receive_trigger_webhook_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateWorkerPoolRequest, - dict, -]) -def test_create_worker_pool_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["worker_pool"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_worker_pool(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_create_worker_pool_rest_required_fields(request_type=cloudbuild.CreateWorkerPoolRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["worker_pool_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "workerPoolId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "workerPoolId" in jsonified_request - assert jsonified_request["workerPoolId"] == request_init["worker_pool_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["workerPoolId"] = 'worker_pool_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_worker_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("validate_only", "worker_pool_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "workerPoolId" in jsonified_request - assert jsonified_request["workerPoolId"] == 'worker_pool_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_worker_pool(request) - - expected_params = [ - ( - "workerPoolId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_worker_pool_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_worker_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(("validateOnly", "workerPoolId", )) & set(("parent", "workerPool", "workerPoolId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_worker_pool_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_worker_pool") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_worker_pool") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.CreateWorkerPoolRequest.pb(cloudbuild.CreateWorkerPoolRequest()) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.CreateWorkerPoolRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateWorkerPoolRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["worker_pool"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_worker_pool(request) - - -def test_create_worker_pool_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_worker_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workerPools" % client.transport._host, args[1]) - - -def test_create_worker_pool_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_worker_pool( - cloudbuild.CreateWorkerPoolRequest(), - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - - -def test_create_worker_pool_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetWorkerPoolRequest, - dict, -]) -def test_get_worker_pool_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.WorkerPool( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=cloudbuild.WorkerPool.State.CREATING, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.WorkerPool.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_worker_pool(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.state == cloudbuild.WorkerPool.State.CREATING - assert response.etag == 'etag_value' - - -def test_get_worker_pool_rest_required_fields(request_type=cloudbuild.GetWorkerPoolRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.WorkerPool() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.WorkerPool.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_worker_pool(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_worker_pool_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_worker_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_worker_pool_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_worker_pool") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_worker_pool") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.GetWorkerPoolRequest.pb(cloudbuild.GetWorkerPoolRequest()) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.WorkerPool.to_json(cloudbuild.WorkerPool()) - - request = cloudbuild.GetWorkerPoolRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.WorkerPool() - - client.get_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetWorkerPoolRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_worker_pool(request) - - -def test_get_worker_pool_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.WorkerPool() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.WorkerPool.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_worker_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) - - -def test_get_worker_pool_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_worker_pool( - cloudbuild.GetWorkerPoolRequest(), - name='name_value', - ) - - -def test_get_worker_pool_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.DeleteWorkerPoolRequest, - dict, -]) -def test_delete_worker_pool_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_worker_pool(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_delete_worker_pool_rest_required_fields(request_type=cloudbuild.DeleteWorkerPoolRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_worker_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_worker_pool(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_worker_pool_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_worker_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_worker_pool_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_delete_worker_pool") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_delete_worker_pool") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
cloudbuild.DeleteWorkerPoolRequest.pb(cloudbuild.DeleteWorkerPoolRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.DeleteWorkerPoolRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.DeleteWorkerPoolRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_worker_pool(request) - - -def test_delete_worker_pool_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_worker_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) - - -def test_delete_worker_pool_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_worker_pool( - cloudbuild.DeleteWorkerPoolRequest(), - name='name_value', - ) - - -def test_delete_worker_pool_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.UpdateWorkerPoolRequest, - dict, -]) -def test_update_worker_pool_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} - request_init["worker_pool"] = {'name': 'projects/sample1/locations/sample2/workerPools/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_worker_pool(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_worker_pool_rest_required_fields(request_type=cloudbuild.UpdateWorkerPoolRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_worker_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_worker_pool(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_worker_pool_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_worker_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("workerPool", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_worker_pool_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_update_worker_pool") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_update_worker_pool") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.UpdateWorkerPoolRequest.pb(cloudbuild.UpdateWorkerPoolRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.UpdateWorkerPoolRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.UpdateWorkerPoolRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} - request_init["worker_pool"] = {'name': 'projects/sample1/locations/sample2/workerPools/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_worker_pool(request) - - -def test_update_worker_pool_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_worker_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) - - -def test_update_worker_pool_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_worker_pool( - cloudbuild.UpdateWorkerPoolRequest(), - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_worker_pool_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListWorkerPoolsRequest, - dict, -]) -def test_list_worker_pools_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.ListWorkerPoolsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_worker_pools(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListWorkerPoolsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_worker_pools_rest_required_fields(request_type=cloudbuild.ListWorkerPoolsRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_worker_pools._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_worker_pools._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListWorkerPoolsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_worker_pools(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_worker_pools_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_worker_pools._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_worker_pools_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_worker_pools") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_worker_pools") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ListWorkerPoolsRequest.pb(cloudbuild.ListWorkerPoolsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.ListWorkerPoolsResponse.to_json(cloudbuild.ListWorkerPoolsResponse()) - - request = cloudbuild.ListWorkerPoolsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.ListWorkerPoolsResponse() - - client.list_worker_pools(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_worker_pools_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListWorkerPoolsRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_worker_pools(request) - - -def test_list_worker_pools_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.ListWorkerPoolsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_worker_pools(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workerPools" % client.transport._host, args[1]) - - -def test_list_worker_pools_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_worker_pools( - cloudbuild.ListWorkerPoolsRequest(), - parent='parent_value', - ) - - -def test_list_worker_pools_rest_pager(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloudbuild.ListWorkerPoolsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_worker_pools(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.WorkerPool) - for i in results) - - pages = list(client.list_worker_pools(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudBuildClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CloudBuildGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - transports.CloudBuildRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = CloudBuildClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudBuildGrpcTransport, - ) - -def test_cloud_build_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudBuildTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_cloud_build_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CloudBuildTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_build', - 'get_build', - 'list_builds', - 'cancel_build', - 'retry_build', - 'approve_build', - 'create_build_trigger', - 'get_build_trigger', - 'list_build_triggers', - 'delete_build_trigger', - 'update_build_trigger', - 'run_build_trigger', - 'receive_trigger_webhook', - 'create_worker_pool', - 'get_worker_pool', - 'delete_worker_pool', - 'update_worker_pool', - 'list_worker_pools', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_cloud_build_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudBuildTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 
'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_cloud_build_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudBuildTransport() - adc.assert_called_once() - - -def test_cloud_build_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudBuildClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - ], -) -def test_cloud_build_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - transports.CloudBuildRestTransport, - ], -) -def test_cloud_build_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudBuildGrpcTransport, grpc_helpers), - (transports.CloudBuildGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cloud_build_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) -def test_cloud_build_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_cloud_build_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.CloudBuildRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_cloud_build_rest_lro_client(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_cloud_build_host_no_port(transport_name): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudbuild.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_cloud_build_host_with_port(transport_name): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudbuild.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudbuild.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_cloud_build_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = CloudBuildClient( - credentials=creds1, - transport=transport_name, - ) - client2 = CloudBuildClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_build._session - session2 = client2.transport.create_build._session - assert session1 != session2 - session1 = client1.transport.get_build._session - session2 = client2.transport.get_build._session - assert session1 != session2 - session1 = client1.transport.list_builds._session - session2 = client2.transport.list_builds._session - assert session1 != session2 - session1 = client1.transport.cancel_build._session - session2 = 
client2.transport.cancel_build._session - assert session1 != session2 - session1 = client1.transport.retry_build._session - session2 = client2.transport.retry_build._session - assert session1 != session2 - session1 = client1.transport.approve_build._session - session2 = client2.transport.approve_build._session - assert session1 != session2 - session1 = client1.transport.create_build_trigger._session - session2 = client2.transport.create_build_trigger._session - assert session1 != session2 - session1 = client1.transport.get_build_trigger._session - session2 = client2.transport.get_build_trigger._session - assert session1 != session2 - session1 = client1.transport.list_build_triggers._session - session2 = client2.transport.list_build_triggers._session - assert session1 != session2 - session1 = client1.transport.delete_build_trigger._session - session2 = client2.transport.delete_build_trigger._session - assert session1 != session2 - session1 = client1.transport.update_build_trigger._session - session2 = client2.transport.update_build_trigger._session - assert session1 != session2 - session1 = client1.transport.run_build_trigger._session - session2 = client2.transport.run_build_trigger._session - assert session1 != session2 - session1 = client1.transport.receive_trigger_webhook._session - session2 = client2.transport.receive_trigger_webhook._session - assert session1 != session2 - session1 = client1.transport.create_worker_pool._session - session2 = client2.transport.create_worker_pool._session - assert session1 != session2 - session1 = client1.transport.get_worker_pool._session - session2 = client2.transport.get_worker_pool._session - assert session1 != session2 - session1 = client1.transport.delete_worker_pool._session - session2 = client2.transport.delete_worker_pool._session - assert session1 != session2 - session1 = client1.transport.update_worker_pool._session - session2 = client2.transport.update_worker_pool._session - assert session1 != session2 - session1 = 
client1.transport.list_worker_pools._session - session2 = client2.transport.list_worker_pools._session - assert session1 != session2 -def test_cloud_build_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_cloud_build_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudBuildGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) -def test_cloud_build_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) -def test_cloud_build_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cloud_build_grpc_lro_client(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_cloud_build_grpc_lro_async_client(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_build_path(): - project = "squid" - build = "clam" - expected = "projects/{project}/builds/{build}".format(project=project, build=build, ) - actual = CloudBuildClient.build_path(project, build) - assert expected == actual - - -def test_parse_build_path(): - expected = { - "project": "whelk", - "build": "octopus", - } - path = CloudBuildClient.build_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_build_path(path) - assert expected == actual - -def test_build_trigger_path(): - project = "oyster" - location = "nudibranch" - trigger = "cuttlefish" - expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) - actual = CloudBuildClient.build_trigger_path(project, location, trigger) - assert expected == actual - - -def test_parse_build_trigger_path(): - expected = { - "project": "mussel", - "location": "winkle", - "trigger": "nautilus", - } - path = CloudBuildClient.build_trigger_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_build_trigger_path(path) - assert expected == actual - -def test_crypto_key_path(): - project = "scallop" - location = "abalone" - keyring = "squid" - key = "clam" - expected = "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format(project=project, location=location, keyring=keyring, key=key, ) - actual = CloudBuildClient.crypto_key_path(project, location, keyring, key) - assert expected == actual - - -def test_parse_crypto_key_path(): - expected = { - "project": "whelk", - "location": "octopus", - "keyring": "oyster", - "key": "nudibranch", - } - path = CloudBuildClient.crypto_key_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_crypto_key_path(path) - assert expected == actual - -def test_network_path(): - project = "cuttlefish" - network = "mussel" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) - actual = CloudBuildClient.network_path(project, network) - assert expected == actual - - -def test_parse_network_path(): - expected = { - "project": "winkle", - "network": "nautilus", - } - path = CloudBuildClient.network_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_network_path(path) - assert expected == actual - -def test_repository_path(): - project = "scallop" - location = "abalone" - connection = "squid" - repository = "clam" - expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) - actual = CloudBuildClient.repository_path(project, location, connection, repository) - assert expected == actual - - -def test_parse_repository_path(): - expected = { - "project": "whelk", - "location": "octopus", - "connection": "oyster", - "repository": "nudibranch", - } - path = CloudBuildClient.repository_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_repository_path(path) - assert expected == actual - -def test_secret_version_path(): - project = "cuttlefish" - secret = "mussel" - version = "winkle" - expected = "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) - actual = CloudBuildClient.secret_version_path(project, secret, version) - assert expected == actual - - -def test_parse_secret_version_path(): - expected = { - "project": "nautilus", - "secret": "scallop", - "version": "abalone", - } - path = CloudBuildClient.secret_version_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_secret_version_path(path) - assert expected == actual - -def test_service_account_path(): - project = "squid" - service_account = "clam" - expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) - actual = CloudBuildClient.service_account_path(project, service_account) - assert expected == actual - - -def test_parse_service_account_path(): - expected = { - "project": "whelk", - "service_account": "octopus", - } - path = CloudBuildClient.service_account_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_service_account_path(path) - assert expected == actual - -def test_subscription_path(): - project = "oyster" - subscription = "nudibranch" - expected = "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) - actual = CloudBuildClient.subscription_path(project, subscription) - assert expected == actual - - -def test_parse_subscription_path(): - expected = { - "project": "cuttlefish", - "subscription": "mussel", - } - path = CloudBuildClient.subscription_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_subscription_path(path) - assert expected == actual - -def test_topic_path(): - project = "winkle" - topic = "nautilus" - expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) - actual = CloudBuildClient.topic_path(project, topic) - assert expected == actual - - -def test_parse_topic_path(): - expected = { - "project": "scallop", - "topic": "abalone", - } - path = CloudBuildClient.topic_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_topic_path(path) - assert expected == actual - -def test_worker_pool_path(): - project = "squid" - location = "clam" - worker_pool = "whelk" - expected = "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(project=project, location=location, worker_pool=worker_pool, ) - actual = CloudBuildClient.worker_pool_path(project, location, worker_pool) - assert expected == actual - - -def test_parse_worker_pool_path(): - expected = { - "project": "octopus", - "location": "oyster", - "worker_pool": "nudibranch", - } - path = CloudBuildClient.worker_pool_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_worker_pool_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CloudBuildClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = CloudBuildClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = CloudBuildClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = CloudBuildClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CloudBuildClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = CloudBuildClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = CloudBuildClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = CloudBuildClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CloudBuildClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = CloudBuildClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.CloudBuildTransport, '_prep_wrapped_messages') as prep: - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.CloudBuildTransport, '_prep_wrapped_messages') as prep: - transport_class = CloudBuildClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc deleted file mode 100644 index a0cf72db..00000000 --- a/owl-bot-staging/v2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/devtools/cloudbuild/__init__.py - google/cloud/devtools/cloudbuild/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in deleted file mode 100644 index 6f731ec0..00000000 --- a/owl-bot-staging/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/devtools/cloudbuild *.py -recursive-include google/cloud/devtools/cloudbuild_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst deleted file mode 100644 index c788a1b3..00000000 --- a/owl-bot-staging/v2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Devtools Cloudbuild API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Devtools Cloudbuild API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. 
`virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst deleted file mode 100644 index f4d9c5e2..00000000 --- a/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst +++ /dev/null @@ -1,10 +0,0 @@ -RepositoryManager ------------------------------------ - -.. automodule:: google.cloud.devtools.cloudbuild_v2.services.repository_manager - :members: - :inherited-members: - -.. automodule:: google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst deleted file mode 100644 index c055be1a..00000000 --- a/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Devtools Cloudbuild v2 API -==================================================== -.. 
toctree:: - :maxdepth: 2 - - repository_manager diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst deleted file mode 100644 index 2148aa78..00000000 --- a/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Devtools Cloudbuild v2 API -================================================= - -.. automodule:: google.cloud.devtools.cloudbuild_v2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py deleted file mode 100644 index 4bd8e2dd..00000000 --- a/owl-bot-staging/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-build documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-build" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. 
-# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Cloud Devtools Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-build-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. 
- # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-build.tex", - u"google-cloud-build Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-build", - u"Google Cloud Devtools Cloudbuild Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-build", - u"google-cloud-build Documentation", - author, - "google-cloud-build", - "GAPIC library for Google Cloud Devtools Cloudbuild API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst deleted file mode 100644 index 476758ee..00000000 --- a/owl-bot-staging/v2/docs/index.rst +++ 
/dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - cloudbuild_v2/services - cloudbuild_v2/types diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py deleted file mode 100644 index 47a5d13c..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.devtools.cloudbuild import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.devtools.cloudbuild_v2.services.repository_manager.client import RepositoryManagerClient -from google.cloud.devtools.cloudbuild_v2.services.repository_manager.async_client import RepositoryManagerAsyncClient - -from google.cloud.devtools.cloudbuild_v2.types.cloudbuild import OperationMetadata -from google.cloud.devtools.cloudbuild_v2.types.cloudbuild import RunWorkflowCustomOperationMetadata -from google.cloud.devtools.cloudbuild_v2.types.repositories import BatchCreateRepositoriesRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import BatchCreateRepositoriesResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import Connection -from google.cloud.devtools.cloudbuild_v2.types.repositories import CreateConnectionRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import CreateRepositoryRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import DeleteConnectionRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import DeleteRepositoryRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchGitRefsRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchGitRefsResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchLinkableRepositoriesRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchLinkableRepositoriesResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadTokenRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadTokenResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadWriteTokenRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadWriteTokenResponse -from 
google.cloud.devtools.cloudbuild_v2.types.repositories import GetConnectionRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import GetRepositoryRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import GitHubConfig -from google.cloud.devtools.cloudbuild_v2.types.repositories import GitHubEnterpriseConfig -from google.cloud.devtools.cloudbuild_v2.types.repositories import GitLabConfig -from google.cloud.devtools.cloudbuild_v2.types.repositories import InstallationState -from google.cloud.devtools.cloudbuild_v2.types.repositories import ListConnectionsRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import ListConnectionsResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import ListRepositoriesRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import ListRepositoriesResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import OAuthCredential -from google.cloud.devtools.cloudbuild_v2.types.repositories import ProcessWebhookRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import Repository -from google.cloud.devtools.cloudbuild_v2.types.repositories import ServiceDirectoryConfig -from google.cloud.devtools.cloudbuild_v2.types.repositories import UpdateConnectionRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import UserCredential - -__all__ = ('RepositoryManagerClient', - 'RepositoryManagerAsyncClient', - 'OperationMetadata', - 'RunWorkflowCustomOperationMetadata', - 'BatchCreateRepositoriesRequest', - 'BatchCreateRepositoriesResponse', - 'Connection', - 'CreateConnectionRequest', - 'CreateRepositoryRequest', - 'DeleteConnectionRequest', - 'DeleteRepositoryRequest', - 'FetchGitRefsRequest', - 'FetchGitRefsResponse', - 'FetchLinkableRepositoriesRequest', - 'FetchLinkableRepositoriesResponse', - 'FetchReadTokenRequest', - 'FetchReadTokenResponse', - 'FetchReadWriteTokenRequest', - 'FetchReadWriteTokenResponse', - 
'GetConnectionRequest', - 'GetRepositoryRequest', - 'GitHubConfig', - 'GitHubEnterpriseConfig', - 'GitLabConfig', - 'InstallationState', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'ListRepositoriesRequest', - 'ListRepositoriesResponse', - 'OAuthCredential', - 'ProcessWebhookRequest', - 'Repository', - 'ServiceDirectoryConfig', - 'UpdateConnectionRequest', - 'UserCredential', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed deleted file mode 100644 index 6070c14c..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-build package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py deleted file mode 100644 index 6745dc72..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.repository_manager import RepositoryManagerClient -from .services.repository_manager import RepositoryManagerAsyncClient - -from .types.cloudbuild import OperationMetadata -from .types.cloudbuild import RunWorkflowCustomOperationMetadata -from .types.repositories import BatchCreateRepositoriesRequest -from .types.repositories import BatchCreateRepositoriesResponse -from .types.repositories import Connection -from .types.repositories import CreateConnectionRequest -from .types.repositories import CreateRepositoryRequest -from .types.repositories import DeleteConnectionRequest -from .types.repositories import DeleteRepositoryRequest -from .types.repositories import FetchGitRefsRequest -from .types.repositories import FetchGitRefsResponse -from .types.repositories import FetchLinkableRepositoriesRequest -from .types.repositories import FetchLinkableRepositoriesResponse -from .types.repositories import FetchReadTokenRequest 
-from .types.repositories import FetchReadTokenResponse -from .types.repositories import FetchReadWriteTokenRequest -from .types.repositories import FetchReadWriteTokenResponse -from .types.repositories import GetConnectionRequest -from .types.repositories import GetRepositoryRequest -from .types.repositories import GitHubConfig -from .types.repositories import GitHubEnterpriseConfig -from .types.repositories import GitLabConfig -from .types.repositories import InstallationState -from .types.repositories import ListConnectionsRequest -from .types.repositories import ListConnectionsResponse -from .types.repositories import ListRepositoriesRequest -from .types.repositories import ListRepositoriesResponse -from .types.repositories import OAuthCredential -from .types.repositories import ProcessWebhookRequest -from .types.repositories import Repository -from .types.repositories import ServiceDirectoryConfig -from .types.repositories import UpdateConnectionRequest -from .types.repositories import UserCredential - -__all__ = ( - 'RepositoryManagerAsyncClient', -'BatchCreateRepositoriesRequest', -'BatchCreateRepositoriesResponse', -'Connection', -'CreateConnectionRequest', -'CreateRepositoryRequest', -'DeleteConnectionRequest', -'DeleteRepositoryRequest', -'FetchGitRefsRequest', -'FetchGitRefsResponse', -'FetchLinkableRepositoriesRequest', -'FetchLinkableRepositoriesResponse', -'FetchReadTokenRequest', -'FetchReadTokenResponse', -'FetchReadWriteTokenRequest', -'FetchReadWriteTokenResponse', -'GetConnectionRequest', -'GetRepositoryRequest', -'GitHubConfig', -'GitHubEnterpriseConfig', -'GitLabConfig', -'InstallationState', -'ListConnectionsRequest', -'ListConnectionsResponse', -'ListRepositoriesRequest', -'ListRepositoriesResponse', -'OAuthCredential', -'OperationMetadata', -'ProcessWebhookRequest', -'Repository', -'RepositoryManagerClient', -'RunWorkflowCustomOperationMetadata', -'ServiceDirectoryConfig', -'UpdateConnectionRequest', -'UserCredential', -) diff --git 
a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json deleted file mode 100644 index 2e77ddd0..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json +++ /dev/null @@ -1,238 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.devtools.cloudbuild_v2", - "protoPackage": "google.devtools.cloudbuild.v2", - "schema": "1.0", - "services": { - "RepositoryManager": { - "clients": { - "grpc": { - "libraryClient": "RepositoryManagerClient", - "rpcs": { - "BatchCreateRepositories": { - "methods": [ - "batch_create_repositories" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateRepository": { - "methods": [ - "create_repository" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteRepository": { - "methods": [ - "delete_repository" - ] - }, - "FetchGitRefs": { - "methods": [ - "fetch_git_refs" - ] - }, - "FetchLinkableRepositories": { - "methods": [ - "fetch_linkable_repositories" - ] - }, - "FetchReadToken": { - "methods": [ - "fetch_read_token" - ] - }, - "FetchReadWriteToken": { - "methods": [ - "fetch_read_write_token" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetRepository": { - "methods": [ - "get_repository" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListRepositories": { - "methods": [ - "list_repositories" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - }, - "grpc-async": { - "libraryClient": "RepositoryManagerAsyncClient", - "rpcs": { - "BatchCreateRepositories": { - "methods": [ - "batch_create_repositories" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateRepository": { - "methods": [ - "create_repository" 
- ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteRepository": { - "methods": [ - "delete_repository" - ] - }, - "FetchGitRefs": { - "methods": [ - "fetch_git_refs" - ] - }, - "FetchLinkableRepositories": { - "methods": [ - "fetch_linkable_repositories" - ] - }, - "FetchReadToken": { - "methods": [ - "fetch_read_token" - ] - }, - "FetchReadWriteToken": { - "methods": [ - "fetch_read_write_token" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetRepository": { - "methods": [ - "get_repository" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListRepositories": { - "methods": [ - "list_repositories" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - }, - "rest": { - "libraryClient": "RepositoryManagerClient", - "rpcs": { - "BatchCreateRepositories": { - "methods": [ - "batch_create_repositories" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateRepository": { - "methods": [ - "create_repository" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteRepository": { - "methods": [ - "delete_repository" - ] - }, - "FetchGitRefs": { - "methods": [ - "fetch_git_refs" - ] - }, - "FetchLinkableRepositories": { - "methods": [ - "fetch_linkable_repositories" - ] - }, - "FetchReadToken": { - "methods": [ - "fetch_read_token" - ] - }, - "FetchReadWriteToken": { - "methods": [ - "fetch_read_write_token" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetRepository": { - "methods": [ - "get_repository" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListRepositories": { - "methods": [ - "list_repositories" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py 
b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed deleted file mode 100644 index 6070c14c..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py deleted file mode 100644 index 89a37dc9..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py deleted file mode 100644 index 4477dbda..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import RepositoryManagerClient -from .async_client import RepositoryManagerAsyncClient - -__all__ = ( - 'RepositoryManagerClient', - 'RepositoryManagerAsyncClient', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py deleted file mode 100644 index f0355efe..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py +++ /dev/null @@ -1,2257 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers -from google.cloud.devtools.cloudbuild_v2.types import cloudbuild -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport -from .client import RepositoryManagerClient - - -class RepositoryManagerAsyncClient: - """Manages connections to source code repositories.""" - - _client: RepositoryManagerClient - - DEFAULT_ENDPOINT = RepositoryManagerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = RepositoryManagerClient.DEFAULT_MTLS_ENDPOINT - - connection_path = 
staticmethod(RepositoryManagerClient.connection_path) - parse_connection_path = staticmethod(RepositoryManagerClient.parse_connection_path) - repository_path = staticmethod(RepositoryManagerClient.repository_path) - parse_repository_path = staticmethod(RepositoryManagerClient.parse_repository_path) - secret_version_path = staticmethod(RepositoryManagerClient.secret_version_path) - parse_secret_version_path = staticmethod(RepositoryManagerClient.parse_secret_version_path) - service_path = staticmethod(RepositoryManagerClient.service_path) - parse_service_path = staticmethod(RepositoryManagerClient.parse_service_path) - common_billing_account_path = staticmethod(RepositoryManagerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(RepositoryManagerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(RepositoryManagerClient.common_folder_path) - parse_common_folder_path = staticmethod(RepositoryManagerClient.parse_common_folder_path) - common_organization_path = staticmethod(RepositoryManagerClient.common_organization_path) - parse_common_organization_path = staticmethod(RepositoryManagerClient.parse_common_organization_path) - common_project_path = staticmethod(RepositoryManagerClient.common_project_path) - parse_common_project_path = staticmethod(RepositoryManagerClient.parse_common_project_path) - common_location_path = staticmethod(RepositoryManagerClient.common_location_path) - parse_common_location_path = staticmethod(RepositoryManagerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - RepositoryManagerAsyncClient: The constructed client. 
- """ - return RepositoryManagerClient.from_service_account_info.__func__(RepositoryManagerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - RepositoryManagerAsyncClient: The constructed client. - """ - return RepositoryManagerClient.from_service_account_file.__func__(RepositoryManagerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. 
Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return RepositoryManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> RepositoryManagerTransport: - """Returns the transport used by the client instance. - - Returns: - RepositoryManagerTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(RepositoryManagerClient).get_transport_class, type(RepositoryManagerClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, RepositoryManagerTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the repository manager client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.RepositoryManagerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = RepositoryManagerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_connection(self, - request: Optional[Union[repositories.CreateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection: Optional[repositories.Connection] = None, - connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a Connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_create_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.CreateConnectionRequest( - parent="parent_value", - connection_id="connection_id_value", - ) - - # Make the request - operation = client.create_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest, dict]]): - The request object. Message for creating a Connection - parent (:class:`str`): - Required. Project and location where the connection will - be created. Format: ``projects/*/locations/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (:class:`google.cloud.devtools.cloudbuild_v2.types.Connection`): - Required. The Connection to create. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_id (:class:`str`): - Required. The ID to use for the Connection, which will - become the final component of the Connection's resource - name. Names must be unique per-project per-location. - Allows alphanumeric characters and any of - -._~%!$&'()*+,;=@. - - This corresponds to the ``connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, connection, connection_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.CreateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if connection is not None: - request.connection = connection - if connection_id is not None: - request.connection_id = connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - repositories.Connection, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def get_connection(self, - request: Optional[Union[repositories.GetConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.Connection: - r"""Gets details of a single connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_get_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest, dict]]): - The request object. Message for getting the details of a - Connection. - name (:class:`str`): - Required. The name of the Connection to retrieve. - Format: ``projects/*/locations/*/connections/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.types.Connection: - A connection to a SCM like GitHub, - GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.GetConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_connection, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_connections(self, - request: Optional[Union[repositories.ListConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListConnectionsAsyncPager: - r"""Lists Connections in a given project and location. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_list_connections(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest, dict]]): - The request object. Message for requesting list of - Connections. - parent (:class:`str`): - Required. The parent, which owns this collection of - Connections. Format: ``projects/*/locations/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsAsyncPager: - Message for response to listing - Connections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.ListConnectionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_connections, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListConnectionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_connection(self, - request: Optional[Union[repositories.UpdateConnectionRequest, dict]] = None, - *, - connection: Optional[repositories.Connection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a single connection. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_update_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.UpdateConnectionRequest( - ) - - # Make the request - operation = client.update_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest, dict]]): - The request object. Message for updating a Connection. - connection (:class:`google.cloud.devtools.cloudbuild_v2.types.Connection`): - Required. The Connection to update. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([connection, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.UpdateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if connection is not None: - request.connection = connection - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection.name", request.connection.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - repositories.Connection, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def delete_connection(self, - request: Optional[Union[repositories.DeleteConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_delete_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest, dict]]): - The request object. Message for deleting a Connection. - name (:class:`str`): - Required. The name of the Connection to delete. Format: - ``projects/*/locations/*/connections/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.DeleteConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def create_repository(self, - request: Optional[Union[repositories.CreateRepositoryRequest, dict]] = None, - *, - parent: Optional[str] = None, - repository: Optional[repositories.Repository] = None, - repository_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a Repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_create_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - repository = cloudbuild_v2.Repository() - repository.remote_uri = "remote_uri_value" - - request = cloudbuild_v2.CreateRepositoryRequest( - parent="parent_value", - repository=repository, - repository_id="repository_id_value", - ) - - # Make the request - operation = client.create_repository(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest, dict]]): - The request object. Message for creating a Repository. - parent (:class:`str`): - Required. The connection to contain - the repository. If the request is part - of a BatchCreateRepositoriesRequest, - this field should be empty or match the - parent specified there. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - repository (:class:`google.cloud.devtools.cloudbuild_v2.types.Repository`): - Required. The repository to create. - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - repository_id (:class:`str`): - Required. The ID to use for the repository, which will - become the final component of the repository's resource - name. This ID should be unique in the connection. Allows - alphanumeric characters and any of -._~%!$&'()*+,;=@. - - This corresponds to the ``repository_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v2.types.Repository` - A repository associated to a parent connection. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, repository, repository_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.CreateRepositoryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if repository is not None: - request.repository = repository - if repository_id is not None: - request.repository_id = repository_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_repository, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - repositories.Repository, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - async def batch_create_repositories(self, - request: Optional[Union[repositories.BatchCreateRepositoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - requests: Optional[MutableSequence[repositories.CreateRepositoryRequest]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates multiple repositories inside a connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_batch_create_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - requests = cloudbuild_v2.CreateRepositoryRequest() - requests.parent = "parent_value" - requests.repository.remote_uri = "remote_uri_value" - requests.repository_id = "repository_id_value" - - request = cloudbuild_v2.BatchCreateRepositoriesRequest( - parent="parent_value", - requests=requests, - ) - - # Make the request - operation = client.batch_create_repositories(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest, dict]]): - The request object. Message for creating repositoritories - in batch. - parent (:class:`str`): - Required. The connection to contain all the repositories - being created. Format: - projects/\ */locations/*/connections/\* The parent field - in the CreateRepositoryRequest messages must either be - empty or match this field. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - requests (:class:`MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]`): - Required. The request messages - specifying the repositories to create. - - This corresponds to the ``requests`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesResponse` - Message for response of creating repositories in batch. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, requests]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.BatchCreateRepositoriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if requests: - request.requests.extend(requests) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.batch_create_repositories, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - repositories.BatchCreateRepositoriesResponse, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def get_repository(self, - request: Optional[Union[repositories.GetRepositoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.Repository: - r"""Gets details of a single repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_get_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetRepositoryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_repository(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest, dict]]): - The request object. Message for getting the details of a - Repository. - name (:class:`str`): - Required. The name of the Repository to retrieve. - Format: - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.types.Repository: - A repository associated to a parent - connection. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.GetRepositoryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_repository, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_repositories(self, - request: Optional[Union[repositories.ListRepositoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListRepositoriesAsyncPager: - r"""Lists Repositories in a given connection. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_list_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListRepositoriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_repositories(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest, dict]]): - The request object. Message for requesting list of - Repositories. - parent (:class:`str`): - Required. The parent, which owns this collection of - Repositories. Format: - ``projects/*/locations/*/connections/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesAsyncPager: - Message for response to listing - Repositories. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.ListRepositoriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_repositories, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListRepositoriesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_repository(self, - request: Optional[Union[repositories.DeleteRepositoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single repository. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_delete_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteRepositoryRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_repository(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest, dict]]): - The request object. Message for deleting a Repository. - name (:class:`str`): - Required. The name of the Repository to delete. Format: - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.DeleteRepositoryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_repository, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - async def fetch_read_write_token(self, - request: Optional[Union[repositories.FetchReadWriteTokenRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchReadWriteTokenResponse: - r"""Fetches read/write token of a given repository. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_fetch_read_write_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadWriteTokenRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_read_write_token(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest, dict]]): - The request object. Message for fetching SCM read/write - token. - repository (:class:`str`): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse: - Message for responding to get - read/write token. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.FetchReadWriteTokenRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_read_write_token, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def fetch_read_token(self, - request: Optional[Union[repositories.FetchReadTokenRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchReadTokenResponse: - r"""Fetches read token of a given repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_fetch_read_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadTokenRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_read_token(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest, dict]]): - The request object. Message for fetching SCM read token. - repository (:class:`str`): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse: - Message for responding to get read - token. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.FetchReadTokenRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_read_token, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def fetch_linkable_repositories(self, - request: Optional[Union[repositories.FetchLinkableRepositoriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.FetchLinkableRepositoriesAsyncPager: - r"""FetchLinkableRepositories get repositories from SCM - that are accessible and could be added to the - connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_fetch_linkable_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchLinkableRepositoriesRequest( - connection="connection_value", - ) - - # Make the request - page_result = client.fetch_linkable_repositories(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest, dict]]): - The request object. Request message for - FetchLinkableRepositories. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesAsyncPager: - Response message for - FetchLinkableRepositories. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - request = repositories.FetchLinkableRepositoriesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_linkable_repositories, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection", request.connection), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.FetchLinkableRepositoriesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def fetch_git_refs(self, - request: Optional[Union[repositories.FetchGitRefsRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchGitRefsResponse: - r"""Fetch the list of branches or tags for a given - repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_fetch_git_refs(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchGitRefsRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_git_refs(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest, dict]]): - The request object. Request for fetching git refs - repository (:class:`str`): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse: - Response for fetching git refs - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.FetchGitRefsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_git_refs, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "RepositoryManagerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "RepositoryManagerAsyncClient", -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py deleted file mode 100644 index 79743795..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py +++ /dev/null @@ -1,2445 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers -from google.cloud.devtools.cloudbuild_v2.types import cloudbuild -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import RepositoryManagerGrpcTransport -from .transports.grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport -from .transports.rest import RepositoryManagerRestTransport - - -class 
RepositoryManagerClientMeta(type): - """Metaclass for the RepositoryManager client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[RepositoryManagerTransport]] - _transport_registry["grpc"] = RepositoryManagerGrpcTransport - _transport_registry["grpc_asyncio"] = RepositoryManagerGrpcAsyncIOTransport - _transport_registry["rest"] = RepositoryManagerRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[RepositoryManagerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class RepositoryManagerClient(metaclass=RepositoryManagerClientMeta): - """Manages connections to source code repositories.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - RepositoryManagerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - RepositoryManagerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> RepositoryManagerTransport: - """Returns the transport used by the client instance. - - Returns: - RepositoryManagerTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def connection_path(project: str,location: str,connection: str,) -> str: - """Returns a fully-qualified connection string.""" - return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - - @staticmethod - def parse_connection_path(path: str) -> Dict[str,str]: - """Parses a connection path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def repository_path(project: str,location: str,connection: str,repository: str,) -> str: - """Returns a fully-qualified repository string.""" - return "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) - - @staticmethod - def parse_repository_path(path: str) -> Dict[str,str]: - """Parses a repository path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)/repositories/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def secret_version_path(project: str,secret: str,version: str,) -> str: - """Returns a fully-qualified secret_version string.""" - return "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) - - @staticmethod - def parse_secret_version_path(path: str) -> Dict[str,str]: - """Parses a secret_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def service_path(project: str,location: str,namespace: str,service: str,) -> str: - """Returns a fully-qualified service string.""" - return 
"projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format(project=project, location=location, namespace=namespace, service=service, ) - - @staticmethod - def parse_service_path(path: str) -> Dict[str,str]: - """Parses a service path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/namespaces/(?P.+?)/services/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project 
path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RepositoryManagerTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the repository manager client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
def __init__(self, *,
             credentials: Optional[ga_credentials.Credentials] = None,
             transport: Optional[Union[str, RepositoryManagerTransport]] = None,
             client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
             client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
             ) -> None:
    """Instantiate the repository manager client.

    Args:
        credentials: Credentials to attach to requests; when omitted,
            they are ascertained from the environment.
        transport: Transport (or transport name) to use; chosen
            automatically when None.
        client_options: Custom options (ignored when a ``transport``
            instance is supplied). ``api_endpoint`` overrides the default
            endpoint; the GOOGLE_API_USE_MTLS_ENDPOINT env var
            ("always"/"never"/"auto") also steers endpoint selection, and
            GOOGLE_API_USE_CLIENT_CERTIFICATE="true" enables
            ``client_cert_source`` for mutual TLS.
        client_info: Client info used for the user-agent string; default
            info is used when None.

    Raises:
        google.auth.exceptions.MutualTLSChannelError: if mTLS transport
            creation fails for any reason.
    """
    # Normalize whatever the caller handed us into a ClientOptions object.
    if isinstance(client_options, dict):
        client_options = client_options_lib.from_dict(client_options)
    if client_options is None:
        client_options = client_options_lib.ClientOptions()
    client_options = cast(client_options_lib.ClientOptions, client_options)

    api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

    api_key_value = getattr(client_options, "api_key", None)
    if api_key_value and credentials:
        raise ValueError("client_options.api_key and credentials are mutually exclusive")

    if isinstance(transport, RepositoryManagerTransport):
        # A ready-made transport already carries credentials and scopes,
        # so conflicting client-level settings are rejected outright.
        if credentials or client_options.credentials_file or api_key_value:
            raise ValueError("When providing a transport instance, "
                             "provide its credentials directly.")
        if client_options.scopes:
            raise ValueError(
                "When providing a transport instance, provide its scopes "
                "directly."
            )
        self._transport = transport
    else:
        import google.auth._default  # type: ignore

        # Newer google-auth releases can mint credentials from an API key.
        if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
            credentials = google.auth._default.get_api_key_credentials(api_key_value)

        transport_cls = type(self).get_transport_class(transport)
        self._transport = transport_cls(
            credentials=credentials,
            credentials_file=client_options.credentials_file,
            host=api_endpoint,
            scopes=client_options.scopes,
            client_cert_source_for_mtls=client_cert_source_func,
            quota_project_id=client_options.quota_project_id,
            client_info=client_info,
            always_use_jwt_access=True,
            api_audience=client_options.api_audience,
        )
def create_connection(self,
        request: Optional[Union[repositories.CreateConnectionRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        connection: Optional[repositories.Connection] = None,
        connection_id: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> operation.Operation:
    r"""Creates a Connection.

    Args:
        request: The ``CreateConnectionRequest`` proto (or equivalent dict).
        parent: Required. Project and location for the new connection,
            ``projects/*/locations/*``. Mutually exclusive with ``request``.
        connection: Required. The Connection to create. Mutually
            exclusive with ``request``.
        connection_id: Required. ID for the Connection; becomes the final
            component of its resource name. Mutually exclusive with
            ``request``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings sent along with the request as metadata.

    Returns:
        google.api_core.operation.Operation: long-running operation whose
        result is a :class:`google.cloud.devtools.cloudbuild_v2.types.Connection`.
    """
    # A fully-formed request and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    if request is not None and any([parent, connection, connection_id]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dict/None into the proto; an existing proto is used as-is.
    if not isinstance(request, repositories.CreateConnectionRequest):
        request = repositories.CreateConnectionRequest(request)
    if parent is not None:
        request.parent = parent
    if connection is not None:
        request.connection = connection
    if connection_id is not None:
        request.connection_id = connection_id

    # Pre-wrapped method carries retry/timeout defaults and error mapping.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.create_connection]

    # Routing header so the backend can route on the parent resource.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    raw_response = wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Surface the LRO as an operation future resolving to a Connection.
    return operation.from_gapic(
        raw_response,
        self._transport.operations_client,
        repositories.Connection,
        metadata_type=cloudbuild.OperationMetadata,
    )
def get_connection(self,
        request: Optional[Union[repositories.GetConnectionRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> repositories.Connection:
    r"""Gets details of a single connection.

    Args:
        request: The ``GetConnectionRequest`` proto (or equivalent dict).
        name: Required. Name of the Connection to retrieve, in the form
            ``projects/*/locations/*/connections/*``. Mutually exclusive
            with ``request``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings sent along with the request as metadata.

    Returns:
        google.cloud.devtools.cloudbuild_v2.types.Connection: the
        requested connection.
    """
    # Reject the ambiguous combination of a request object plus
    # flattened field arguments.
    if request is not None and any([name]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dict/None into the proto; an existing proto is used as-is.
    if not isinstance(request, repositories.GetConnectionRequest):
        request = repositories.GetConnectionRequest(request)
    if name is not None:
        request.name = name

    # Pre-wrapped method carries retry/timeout defaults and error mapping.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.get_connection]

    # Routing header so the backend can route on the resource name.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("name", request.name),
        )),
    )

    # Unary call — the Connection comes back directly.
    return wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def list_connections(self,
        request: Optional[Union[repositories.ListConnectionsRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> pagers.ListConnectionsPager:
    r"""Lists Connections in a given project and location.

    Args:
        request: The ``ListConnectionsRequest`` proto (or equivalent dict).
        parent: Required. Owner of the collection, in the form
            ``projects/*/locations/*``. Mutually exclusive with ``request``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings sent along with the request as metadata.

    Returns:
        pagers.ListConnectionsPager: iterating it yields results and
        resolves additional pages automatically.
    """
    # Reject the ambiguous combination of a request object plus
    # flattened field arguments.
    if request is not None and any([parent]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dict/None into the proto; an existing proto is used as-is.
    if not isinstance(request, repositories.ListConnectionsRequest):
        request = repositories.ListConnectionsRequest(request)
    if parent is not None:
        request.parent = parent

    # Pre-wrapped method carries retry/timeout defaults and error mapping.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.list_connections]

    # Routing header so the backend can route on the parent resource.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    first_page = wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Hand back a pager that refetches subsequent pages on demand.
    return pagers.ListConnectionsPager(
        method=wrapped_rpc,
        request=request,
        response=first_page,
        metadata=metadata,
    )
def update_connection(self,
        request: Optional[Union[repositories.UpdateConnectionRequest, dict]] = None,
        *,
        connection: Optional[repositories.Connection] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> operation.Operation:
    r"""Updates a single connection.

    Args:
        request: The ``UpdateConnectionRequest`` proto (or equivalent dict).
        connection: Required. The Connection to update. Mutually
            exclusive with ``request``.
        update_mask: The list of fields to be updated. Mutually exclusive
            with ``request``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings sent along with the request as metadata.

    Returns:
        google.api_core.operation.Operation: long-running operation whose
        result is a :class:`google.cloud.devtools.cloudbuild_v2.types.Connection`.
    """
    # Reject the ambiguous combination of a request object plus
    # flattened field arguments.
    if request is not None and any([connection, update_mask]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dict/None into the proto; an existing proto is used as-is.
    if not isinstance(request, repositories.UpdateConnectionRequest):
        request = repositories.UpdateConnectionRequest(request)
    if connection is not None:
        request.connection = connection
    if update_mask is not None:
        request.update_mask = update_mask

    # Pre-wrapped method carries retry/timeout defaults and error mapping.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.update_connection]

    # Routing header keyed on the nested connection's resource name.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("connection.name", request.connection.name),
        )),
    )

    raw_response = wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Surface the LRO as an operation future resolving to a Connection.
    return operation.from_gapic(
        raw_response,
        self._transport.operations_client,
        repositories.Connection,
        metadata_type=cloudbuild.OperationMetadata,
    )
def delete_connection(self,
        request: Optional[Union[repositories.DeleteConnectionRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> operation.Operation:
    r"""Deletes a single connection.

    Args:
        request: The ``DeleteConnectionRequest`` proto (or equivalent dict).
        name: Required. Name of the Connection to delete, in the form
            ``projects/*/locations/*/connections/*``. Mutually exclusive
            with ``request``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings sent along with the request as metadata.

    Returns:
        google.api_core.operation.Operation: long-running operation whose
        result is :class:`google.protobuf.empty_pb2.Empty`.
    """
    # Reject the ambiguous combination of a request object plus
    # flattened field arguments.
    if request is not None and any([name]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dict/None into the proto; an existing proto is used as-is.
    if not isinstance(request, repositories.DeleteConnectionRequest):
        request = repositories.DeleteConnectionRequest(request)
    if name is not None:
        request.name = name

    # Pre-wrapped method carries retry/timeout defaults and error mapping.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.delete_connection]

    # Routing header so the backend can route on the resource name.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("name", request.name),
        )),
    )

    raw_response = wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Deletion yields no payload; the future resolves to Empty.
    return operation.from_gapic(
        raw_response,
        self._transport.operations_client,
        empty_pb2.Empty,
        metadata_type=cloudbuild.OperationMetadata,
    )
def create_repository(self,
        request: Optional[Union[repositories.CreateRepositoryRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        repository: Optional[repositories.Repository] = None,
        repository_id: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> operation.Operation:
    r"""Creates a Repository.

    Args:
        request: The ``CreateRepositoryRequest`` proto (or equivalent dict).
        parent: Required. The connection to contain the repository. When
            part of a BatchCreateRepositoriesRequest this should be empty
            or match the parent specified there. Mutually exclusive with
            ``request``.
        repository: Required. The repository to create. Mutually
            exclusive with ``request``.
        repository_id: Required. ID for the repository; becomes the final
            component of its resource name and must be unique within the
            connection. Mutually exclusive with ``request``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings sent along with the request as metadata.

    Returns:
        google.api_core.operation.Operation: long-running operation whose
        result is a :class:`google.cloud.devtools.cloudbuild_v2.types.Repository`.
    """
    # Reject the ambiguous combination of a request object plus
    # flattened field arguments.
    if request is not None and any([parent, repository, repository_id]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dict/None into the proto; an existing proto is used as-is.
    if not isinstance(request, repositories.CreateRepositoryRequest):
        request = repositories.CreateRepositoryRequest(request)
    if parent is not None:
        request.parent = parent
    if repository is not None:
        request.repository = repository
    if repository_id is not None:
        request.repository_id = repository_id

    # Pre-wrapped method carries retry/timeout defaults and error mapping.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.create_repository]

    # Routing header so the backend can route on the parent resource.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    raw_response = wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Surface the LRO as an operation future resolving to a Repository.
    return operation.from_gapic(
        raw_response,
        self._transport.operations_client,
        repositories.Repository,
        metadata_type=cloudbuild.OperationMetadata,
    )
def batch_create_repositories(self,
        request: Optional[Union[repositories.BatchCreateRepositoriesRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        requests: Optional[MutableSequence[repositories.CreateRepositoryRequest]] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> operation.Operation:
    r"""Creates multiple repositories inside a connection.

    Args:
        request: The ``BatchCreateRepositoriesRequest`` proto (or
            equivalent dict).
        parent: Required. The connection to contain all repositories
            being created, ``projects/*/locations/*/connections/*``; the
            parent in each CreateRepositoryRequest must be empty or match
            it. Mutually exclusive with ``request``.
        requests: Required. The request messages specifying the
            repositories to create. Mutually exclusive with ``request``.
        retry: Designation of what errors, if any, should be retried.
        timeout: The timeout for this request.
        metadata: Strings sent along with the request as metadata.

    Returns:
        google.api_core.operation.Operation: long-running operation whose
        result is a
        :class:`google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesResponse`.
    """
    # Reject the ambiguous combination of a request object plus
    # flattened field arguments.
    if request is not None and any([parent, requests]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dict/None into the proto; an existing proto is used as-is.
    if not isinstance(request, repositories.BatchCreateRepositoriesRequest):
        request = repositories.BatchCreateRepositoriesRequest(request)
    if parent is not None:
        request.parent = parent
    if requests is not None:
        request.requests = requests

    # Pre-wrapped method carries retry/timeout defaults and error mapping.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.batch_create_repositories]

    # Routing header so the backend can route on the parent resource.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    raw_response = wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Surface the LRO; its result is the batch-create response message.
    return operation.from_gapic(
        raw_response,
        self._transport.operations_client,
        repositories.BatchCreateRepositoriesResponse,
        metadata_type=cloudbuild.OperationMetadata,
    )
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.Repository: - A repository associated to a parent - connection. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.GetRepositoryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.GetRepositoryRequest): - request = repositories.GetRepositoryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_repository] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_repositories(self, - request: Optional[Union[repositories.ListRepositoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListRepositoriesPager: - r"""Lists Repositories in a given connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_list_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListRepositoriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_repositories(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest, dict]): - The request object. Message for requesting list of - Repositories. - parent (str): - Required. The parent, which owns this collection of - Repositories. Format: - ``projects/*/locations/*/connections/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesPager: - Message for response to listing - Repositories. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.ListRepositoriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.ListRepositoriesRequest): - request = repositories.ListRepositoriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_repositories] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListRepositoriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_repository(self, - request: Optional[Union[repositories.DeleteRepositoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_delete_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteRepositoryRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_repository(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest, dict]): - The request object. Message for deleting a Repository. - name (str): - Required. The name of the Repository to delete. Format: - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.DeleteRepositoryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.DeleteRepositoryRequest): - request = repositories.DeleteRepositoryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_repository] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - def fetch_read_write_token(self, - request: Optional[Union[repositories.FetchReadWriteTokenRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchReadWriteTokenResponse: - r"""Fetches read/write token of a given repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_fetch_read_write_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadWriteTokenRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_read_write_token(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest, dict]): - The request object. Message for fetching SCM read/write - token. - repository (str): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse: - Message for responding to get - read/write token. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.FetchReadWriteTokenRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.FetchReadWriteTokenRequest): - request = repositories.FetchReadWriteTokenRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_read_write_token] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def fetch_read_token(self, - request: Optional[Union[repositories.FetchReadTokenRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchReadTokenResponse: - r"""Fetches read token of a given repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_fetch_read_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadTokenRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_read_token(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest, dict]): - The request object. Message for fetching SCM read token. - repository (str): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse: - Message for responding to get read - token. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.FetchReadTokenRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.FetchReadTokenRequest): - request = repositories.FetchReadTokenRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_read_token] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def fetch_linkable_repositories(self, - request: Optional[Union[repositories.FetchLinkableRepositoriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.FetchLinkableRepositoriesPager: - r"""FetchLinkableRepositories get repositories from SCM - that are accessible and could be added to the - connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_fetch_linkable_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchLinkableRepositoriesRequest( - connection="connection_value", - ) - - # Make the request - page_result = client.fetch_linkable_repositories(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest, dict]): - The request object. Request message for - FetchLinkableRepositories. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesPager: - Response message for - FetchLinkableRepositories. 
- Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a repositories.FetchLinkableRepositoriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.FetchLinkableRepositoriesRequest): - request = repositories.FetchLinkableRepositoriesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_linkable_repositories] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection", request.connection), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.FetchLinkableRepositoriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def fetch_git_refs(self, - request: Optional[Union[repositories.FetchGitRefsRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchGitRefsResponse: - r"""Fetch the list of branches or tags for a given - repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_fetch_git_refs(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchGitRefsRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_git_refs(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest, dict]): - The request object. Request for fetching git refs - repository (str): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse: - Response for fetching git refs - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.FetchGitRefsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, repositories.FetchGitRefsRequest): - request = repositories.FetchGitRefsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_git_refs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "RepositoryManagerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). 
A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "RepositoryManagerClient", -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py deleted file mode 100644 index 1ae879b6..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py +++ /dev/null @@ -1,381 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.devtools.cloudbuild_v2.types import repositories - - -class ListConnectionsPager: - """A pager for iterating through ``list_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``connections`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., repositories.ListConnectionsResponse], - request: repositories.ListConnectionsRequest, - response: repositories.ListConnectionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = repositories.ListConnectionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[repositories.ListConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[repositories.Connection]: - for page in self.pages: - yield from page.connections - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConnectionsAsyncPager: - """A pager for iterating through ``list_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``connections`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[repositories.ListConnectionsResponse]], - request: repositories.ListConnectionsRequest, - response: repositories.ListConnectionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = repositories.ListConnectionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[repositories.ListConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[repositories.Connection]: - async def async_generator(): - async for page in self.pages: - for response in page.connections: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListRepositoriesPager: - """A pager for iterating through ``list_repositories`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``repositories`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListRepositories`` requests and continue to iterate - through the ``repositories`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., repositories.ListRepositoriesResponse], - request: repositories.ListRepositoriesRequest, - response: repositories.ListRepositoriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = repositories.ListRepositoriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[repositories.ListRepositoriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[repositories.Repository]: - for page in self.pages: - yield from page.repositories - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListRepositoriesAsyncPager: - """A pager for iterating through ``list_repositories`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``repositories`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListRepositories`` requests and continue to iterate - through the ``repositories`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[repositories.ListRepositoriesResponse]], - request: repositories.ListRepositoriesRequest, - response: repositories.ListRepositoriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = repositories.ListRepositoriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[repositories.ListRepositoriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[repositories.Repository]: - async def async_generator(): - async for page in self.pages: - for response in page.repositories: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class FetchLinkableRepositoriesPager: - """A pager for iterating through ``fetch_linkable_repositories`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``repositories`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``FetchLinkableRepositories`` requests and continue to iterate - through the ``repositories`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., repositories.FetchLinkableRepositoriesResponse], - request: repositories.FetchLinkableRepositoriesRequest, - response: repositories.FetchLinkableRepositoriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = repositories.FetchLinkableRepositoriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[repositories.FetchLinkableRepositoriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[repositories.Repository]: - for page in self.pages: - yield from page.repositories - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class FetchLinkableRepositoriesAsyncPager: - """A pager for iterating through ``fetch_linkable_repositories`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``repositories`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``FetchLinkableRepositories`` requests and continue to iterate - through the ``repositories`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[repositories.FetchLinkableRepositoriesResponse]], - request: repositories.FetchLinkableRepositoriesRequest, - response: repositories.FetchLinkableRepositoriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = repositories.FetchLinkableRepositoriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[repositories.FetchLinkableRepositoriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[repositories.Repository]: - async def async_generator(): - async for page in self.pages: - for response in page.repositories: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py deleted file mode 100644 index b912a799..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import RepositoryManagerTransport -from .grpc import RepositoryManagerGrpcTransport -from .grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport -from .rest import RepositoryManagerRestTransport -from .rest import RepositoryManagerRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[RepositoryManagerTransport]] -_transport_registry['grpc'] = RepositoryManagerGrpcTransport -_transport_registry['grpc_asyncio'] = RepositoryManagerGrpcAsyncIOTransport -_transport_registry['rest'] = RepositoryManagerRestTransport - -__all__ = ( - 'RepositoryManagerTransport', - 'RepositoryManagerGrpcTransport', - 'RepositoryManagerGrpcAsyncIOTransport', - 'RepositoryManagerRestTransport', - 'RepositoryManagerRestInterceptor', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py deleted file mode 100644 index 28b1d7a8..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py +++ /dev/null @@ -1,431 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class RepositoryManagerTransport(abc.ABC): - """Abstract transport class for RepositoryManager.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudbuild.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] 
= False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_connection: gapic_v1.method.wrap_method( - self.create_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.get_connection: gapic_v1.method.wrap_method( - self.get_connection, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_connections: gapic_v1.method.wrap_method( - self.list_connections, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_connection: gapic_v1.method.wrap_method( - self.update_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_connection: gapic_v1.method.wrap_method( - self.delete_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.create_repository: gapic_v1.method.wrap_method( - self.create_repository, - default_timeout=60.0, - client_info=client_info, - ), - self.batch_create_repositories: gapic_v1.method.wrap_method( - 
self.batch_create_repositories, - default_timeout=None, - client_info=client_info, - ), - self.get_repository: gapic_v1.method.wrap_method( - self.get_repository, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_repositories: gapic_v1.method.wrap_method( - self.list_repositories, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_repository: gapic_v1.method.wrap_method( - self.delete_repository, - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_read_write_token: gapic_v1.method.wrap_method( - self.fetch_read_write_token, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_read_token: gapic_v1.method.wrap_method( - self.fetch_read_token, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_linkable_repositories: gapic_v1.method.wrap_method( - self.fetch_linkable_repositories, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_git_refs: gapic_v1.method.wrap_method( - self.fetch_git_refs, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated 
with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_connection(self) -> Callable[ - [repositories.CreateConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_connection(self) -> Callable[ - [repositories.GetConnectionRequest], - Union[ - repositories.Connection, - Awaitable[repositories.Connection] - ]]: - raise NotImplementedError() - - @property - def list_connections(self) -> Callable[ - [repositories.ListConnectionsRequest], - Union[ - repositories.ListConnectionsResponse, - Awaitable[repositories.ListConnectionsResponse] - ]]: - raise NotImplementedError() - - @property - def update_connection(self) -> Callable[ - [repositories.UpdateConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_connection(self) -> Callable[ - [repositories.DeleteConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_repository(self) -> Callable[ - [repositories.CreateRepositoryRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def batch_create_repositories(self) -> Callable[ - [repositories.BatchCreateRepositoriesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_repository(self) -> Callable[ - [repositories.GetRepositoryRequest], - Union[ - repositories.Repository, - 
Awaitable[repositories.Repository] - ]]: - raise NotImplementedError() - - @property - def list_repositories(self) -> Callable[ - [repositories.ListRepositoriesRequest], - Union[ - repositories.ListRepositoriesResponse, - Awaitable[repositories.ListRepositoriesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_repository(self) -> Callable[ - [repositories.DeleteRepositoryRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def fetch_read_write_token(self) -> Callable[ - [repositories.FetchReadWriteTokenRequest], - Union[ - repositories.FetchReadWriteTokenResponse, - Awaitable[repositories.FetchReadWriteTokenResponse] - ]]: - raise NotImplementedError() - - @property - def fetch_read_token(self) -> Callable[ - [repositories.FetchReadTokenRequest], - Union[ - repositories.FetchReadTokenResponse, - Awaitable[repositories.FetchReadTokenResponse] - ]]: - raise NotImplementedError() - - @property - def fetch_linkable_repositories(self) -> Callable[ - [repositories.FetchLinkableRepositoriesRequest], - Union[ - repositories.FetchLinkableRepositoriesResponse, - Awaitable[repositories.FetchLinkableRepositoriesResponse] - ]]: - raise NotImplementedError() - - @property - def fetch_git_refs(self) -> Callable[ - [repositories.FetchGitRefsRequest], - Union[ - repositories.FetchGitRefsResponse, - Awaitable[repositories.FetchGitRefsResponse] - ]]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, 
Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'RepositoryManagerTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py deleted file mode 100644 index 2507acae..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py +++ /dev/null @@ -1,743 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO - - -class RepositoryManagerGrpcTransport(RepositoryManagerTransport): - """gRPC backend transport for RepositoryManager. - - Manages connections to source code repositories. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. 
- self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. 
- return self._operations_client - - @property - def create_connection(self) -> Callable[ - [repositories.CreateConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the create connection method over gRPC. - - Creates a Connection. - - Returns: - Callable[[~.CreateConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection' not in self._stubs: - self._stubs['create_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/CreateConnection', - request_serializer=repositories.CreateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_connection'] - - @property - def get_connection(self) -> Callable[ - [repositories.GetConnectionRequest], - repositories.Connection]: - r"""Return a callable for the get connection method over gRPC. - - Gets details of a single connection. - - Returns: - Callable[[~.GetConnectionRequest], - ~.Connection]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_connection' not in self._stubs: - self._stubs['get_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/GetConnection', - request_serializer=repositories.GetConnectionRequest.serialize, - response_deserializer=repositories.Connection.deserialize, - ) - return self._stubs['get_connection'] - - @property - def list_connections(self) -> Callable[ - [repositories.ListConnectionsRequest], - repositories.ListConnectionsResponse]: - r"""Return a callable for the list connections method over gRPC. - - Lists Connections in a given project and location. - - Returns: - Callable[[~.ListConnectionsRequest], - ~.ListConnectionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_connections' not in self._stubs: - self._stubs['list_connections'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/ListConnections', - request_serializer=repositories.ListConnectionsRequest.serialize, - response_deserializer=repositories.ListConnectionsResponse.deserialize, - ) - return self._stubs['list_connections'] - - @property - def update_connection(self) -> Callable[ - [repositories.UpdateConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the update connection method over gRPC. - - Updates a single connection. - - Returns: - Callable[[~.UpdateConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_connection' not in self._stubs: - self._stubs['update_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/UpdateConnection', - request_serializer=repositories.UpdateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_connection'] - - @property - def delete_connection(self) -> Callable[ - [repositories.DeleteConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete connection method over gRPC. - - Deletes a single connection. - - Returns: - Callable[[~.DeleteConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection' not in self._stubs: - self._stubs['delete_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteConnection', - request_serializer=repositories.DeleteConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_connection'] - - @property - def create_repository(self) -> Callable[ - [repositories.CreateRepositoryRequest], - operations_pb2.Operation]: - r"""Return a callable for the create repository method over gRPC. - - Creates a Repository. - - Returns: - Callable[[~.CreateRepositoryRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_repository' not in self._stubs: - self._stubs['create_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/CreateRepository', - request_serializer=repositories.CreateRepositoryRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_repository'] - - @property - def batch_create_repositories(self) -> Callable[ - [repositories.BatchCreateRepositoriesRequest], - operations_pb2.Operation]: - r"""Return a callable for the batch create repositories method over gRPC. - - Creates multiple repositories inside a connection. - - Returns: - Callable[[~.BatchCreateRepositoriesRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_repositories' not in self._stubs: - self._stubs['batch_create_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/BatchCreateRepositories', - request_serializer=repositories.BatchCreateRepositoriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['batch_create_repositories'] - - @property - def get_repository(self) -> Callable[ - [repositories.GetRepositoryRequest], - repositories.Repository]: - r"""Return a callable for the get repository method over gRPC. - - Gets details of a single repository. - - Returns: - Callable[[~.GetRepositoryRequest], - ~.Repository]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_repository' not in self._stubs: - self._stubs['get_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/GetRepository', - request_serializer=repositories.GetRepositoryRequest.serialize, - response_deserializer=repositories.Repository.deserialize, - ) - return self._stubs['get_repository'] - - @property - def list_repositories(self) -> Callable[ - [repositories.ListRepositoriesRequest], - repositories.ListRepositoriesResponse]: - r"""Return a callable for the list repositories method over gRPC. - - Lists Repositories in a given connection. - - Returns: - Callable[[~.ListRepositoriesRequest], - ~.ListRepositoriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_repositories' not in self._stubs: - self._stubs['list_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/ListRepositories', - request_serializer=repositories.ListRepositoriesRequest.serialize, - response_deserializer=repositories.ListRepositoriesResponse.deserialize, - ) - return self._stubs['list_repositories'] - - @property - def delete_repository(self) -> Callable[ - [repositories.DeleteRepositoryRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete repository method over gRPC. - - Deletes a single repository. - - Returns: - Callable[[~.DeleteRepositoryRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_repository' not in self._stubs: - self._stubs['delete_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteRepository', - request_serializer=repositories.DeleteRepositoryRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_repository'] - - @property - def fetch_read_write_token(self) -> Callable[ - [repositories.FetchReadWriteTokenRequest], - repositories.FetchReadWriteTokenResponse]: - r"""Return a callable for the fetch read write token method over gRPC. - - Fetches read/write token of a given repository. - - Returns: - Callable[[~.FetchReadWriteTokenRequest], - ~.FetchReadWriteTokenResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_read_write_token' not in self._stubs: - self._stubs['fetch_read_write_token'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadWriteToken', - request_serializer=repositories.FetchReadWriteTokenRequest.serialize, - response_deserializer=repositories.FetchReadWriteTokenResponse.deserialize, - ) - return self._stubs['fetch_read_write_token'] - - @property - def fetch_read_token(self) -> Callable[ - [repositories.FetchReadTokenRequest], - repositories.FetchReadTokenResponse]: - r"""Return a callable for the fetch read token method over gRPC. - - Fetches read token of a given repository. - - Returns: - Callable[[~.FetchReadTokenRequest], - ~.FetchReadTokenResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_read_token' not in self._stubs: - self._stubs['fetch_read_token'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadToken', - request_serializer=repositories.FetchReadTokenRequest.serialize, - response_deserializer=repositories.FetchReadTokenResponse.deserialize, - ) - return self._stubs['fetch_read_token'] - - @property - def fetch_linkable_repositories(self) -> Callable[ - [repositories.FetchLinkableRepositoriesRequest], - repositories.FetchLinkableRepositoriesResponse]: - r"""Return a callable for the fetch linkable repositories method over gRPC. - - FetchLinkableRepositories get repositories from SCM - that are accessible and could be added to the - connection. - - Returns: - Callable[[~.FetchLinkableRepositoriesRequest], - ~.FetchLinkableRepositoriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_linkable_repositories' not in self._stubs: - self._stubs['fetch_linkable_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchLinkableRepositories', - request_serializer=repositories.FetchLinkableRepositoriesRequest.serialize, - response_deserializer=repositories.FetchLinkableRepositoriesResponse.deserialize, - ) - return self._stubs['fetch_linkable_repositories'] - - @property - def fetch_git_refs(self) -> Callable[ - [repositories.FetchGitRefsRequest], - repositories.FetchGitRefsResponse]: - r"""Return a callable for the fetch git refs method over gRPC. - - Fetch the list of branches or tags for a given - repository. 
- - Returns: - Callable[[~.FetchGitRefsRequest], - ~.FetchGitRefsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_git_refs' not in self._stubs: - self._stubs['fetch_git_refs'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchGitRefs', - request_serializer=repositories.FetchGitRefsRequest.serialize, - response_deserializer=repositories.FetchGitRefsResponse.deserialize, - ) - return self._stubs['fetch_git_refs'] - - def close(self): - self.grpc_channel.close() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'RepositoryManagerGrpcTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py deleted file mode 100644 index 55d562a9..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py +++ /dev/null @@ -1,742 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO -from .grpc import RepositoryManagerGrpcTransport - - -class RepositoryManagerGrpcAsyncIOTransport(RepositoryManagerTransport): - """gRPC AsyncIO backend transport for RepositoryManager. - - Manages connections to source code repositories. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. 
- """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_connection(self) -> Callable[ - [repositories.CreateConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create connection method over gRPC. - - Creates a Connection. - - Returns: - Callable[[~.CreateConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection' not in self._stubs: - self._stubs['create_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/CreateConnection', - request_serializer=repositories.CreateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_connection'] - - @property - def get_connection(self) -> Callable[ - [repositories.GetConnectionRequest], - Awaitable[repositories.Connection]]: - r"""Return a callable for the get connection method over gRPC. - - Gets details of a single connection. - - Returns: - Callable[[~.GetConnectionRequest], - Awaitable[~.Connection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_connection' not in self._stubs: - self._stubs['get_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/GetConnection', - request_serializer=repositories.GetConnectionRequest.serialize, - response_deserializer=repositories.Connection.deserialize, - ) - return self._stubs['get_connection'] - - @property - def list_connections(self) -> Callable[ - [repositories.ListConnectionsRequest], - Awaitable[repositories.ListConnectionsResponse]]: - r"""Return a callable for the list connections method over gRPC. - - Lists Connections in a given project and location. - - Returns: - Callable[[~.ListConnectionsRequest], - Awaitable[~.ListConnectionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_connections' not in self._stubs: - self._stubs['list_connections'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/ListConnections', - request_serializer=repositories.ListConnectionsRequest.serialize, - response_deserializer=repositories.ListConnectionsResponse.deserialize, - ) - return self._stubs['list_connections'] - - @property - def update_connection(self) -> Callable[ - [repositories.UpdateConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update connection method over gRPC. - - Updates a single connection. - - Returns: - Callable[[~.UpdateConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_connection' not in self._stubs: - self._stubs['update_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/UpdateConnection', - request_serializer=repositories.UpdateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_connection'] - - @property - def delete_connection(self) -> Callable[ - [repositories.DeleteConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete connection method over gRPC. - - Deletes a single connection. - - Returns: - Callable[[~.DeleteConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection' not in self._stubs: - self._stubs['delete_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteConnection', - request_serializer=repositories.DeleteConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_connection'] - - @property - def create_repository(self) -> Callable[ - [repositories.CreateRepositoryRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create repository method over gRPC. - - Creates a Repository. - - Returns: - Callable[[~.CreateRepositoryRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_repository' not in self._stubs: - self._stubs['create_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/CreateRepository', - request_serializer=repositories.CreateRepositoryRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_repository'] - - @property - def batch_create_repositories(self) -> Callable[ - [repositories.BatchCreateRepositoriesRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the batch create repositories method over gRPC. - - Creates multiple repositories inside a connection. - - Returns: - Callable[[~.BatchCreateRepositoriesRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_repositories' not in self._stubs: - self._stubs['batch_create_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/BatchCreateRepositories', - request_serializer=repositories.BatchCreateRepositoriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['batch_create_repositories'] - - @property - def get_repository(self) -> Callable[ - [repositories.GetRepositoryRequest], - Awaitable[repositories.Repository]]: - r"""Return a callable for the get repository method over gRPC. - - Gets details of a single repository. - - Returns: - Callable[[~.GetRepositoryRequest], - Awaitable[~.Repository]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_repository' not in self._stubs: - self._stubs['get_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/GetRepository', - request_serializer=repositories.GetRepositoryRequest.serialize, - response_deserializer=repositories.Repository.deserialize, - ) - return self._stubs['get_repository'] - - @property - def list_repositories(self) -> Callable[ - [repositories.ListRepositoriesRequest], - Awaitable[repositories.ListRepositoriesResponse]]: - r"""Return a callable for the list repositories method over gRPC. - - Lists Repositories in a given connection. - - Returns: - Callable[[~.ListRepositoriesRequest], - Awaitable[~.ListRepositoriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_repositories' not in self._stubs: - self._stubs['list_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/ListRepositories', - request_serializer=repositories.ListRepositoriesRequest.serialize, - response_deserializer=repositories.ListRepositoriesResponse.deserialize, - ) - return self._stubs['list_repositories'] - - @property - def delete_repository(self) -> Callable[ - [repositories.DeleteRepositoryRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete repository method over gRPC. - - Deletes a single repository. - - Returns: - Callable[[~.DeleteRepositoryRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_repository' not in self._stubs: - self._stubs['delete_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteRepository', - request_serializer=repositories.DeleteRepositoryRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_repository'] - - @property - def fetch_read_write_token(self) -> Callable[ - [repositories.FetchReadWriteTokenRequest], - Awaitable[repositories.FetchReadWriteTokenResponse]]: - r"""Return a callable for the fetch read write token method over gRPC. - - Fetches read/write token of a given repository. - - Returns: - Callable[[~.FetchReadWriteTokenRequest], - Awaitable[~.FetchReadWriteTokenResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_read_write_token' not in self._stubs: - self._stubs['fetch_read_write_token'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadWriteToken', - request_serializer=repositories.FetchReadWriteTokenRequest.serialize, - response_deserializer=repositories.FetchReadWriteTokenResponse.deserialize, - ) - return self._stubs['fetch_read_write_token'] - - @property - def fetch_read_token(self) -> Callable[ - [repositories.FetchReadTokenRequest], - Awaitable[repositories.FetchReadTokenResponse]]: - r"""Return a callable for the fetch read token method over gRPC. - - Fetches read token of a given repository. - - Returns: - Callable[[~.FetchReadTokenRequest], - Awaitable[~.FetchReadTokenResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_read_token' not in self._stubs: - self._stubs['fetch_read_token'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadToken', - request_serializer=repositories.FetchReadTokenRequest.serialize, - response_deserializer=repositories.FetchReadTokenResponse.deserialize, - ) - return self._stubs['fetch_read_token'] - - @property - def fetch_linkable_repositories(self) -> Callable[ - [repositories.FetchLinkableRepositoriesRequest], - Awaitable[repositories.FetchLinkableRepositoriesResponse]]: - r"""Return a callable for the fetch linkable repositories method over gRPC. - - FetchLinkableRepositories get repositories from SCM - that are accessible and could be added to the - connection. - - Returns: - Callable[[~.FetchLinkableRepositoriesRequest], - Awaitable[~.FetchLinkableRepositoriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_linkable_repositories' not in self._stubs: - self._stubs['fetch_linkable_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchLinkableRepositories', - request_serializer=repositories.FetchLinkableRepositoriesRequest.serialize, - response_deserializer=repositories.FetchLinkableRepositoriesResponse.deserialize, - ) - return self._stubs['fetch_linkable_repositories'] - - @property - def fetch_git_refs(self) -> Callable[ - [repositories.FetchGitRefsRequest], - Awaitable[repositories.FetchGitRefsResponse]]: - r"""Return a callable for the fetch git refs method over gRPC. - - Fetch the list of branches or tags for a given - repository. 
- - Returns: - Callable[[~.FetchGitRefsRequest], - Awaitable[~.FetchGitRefsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_git_refs' not in self._stubs: - self._stubs['fetch_git_refs'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchGitRefs', - request_serializer=repositories.FetchGitRefsRequest.serialize, - response_deserializer=repositories.FetchGitRefsResponse.deserialize, - ) - return self._stubs['fetch_git_refs'] - - def close(self): - return self.grpc_channel.close() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'RepositoryManagerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py deleted file mode 100644 index 365c9861..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py +++ /dev/null @@ -1,2275 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.longrunning import operations_pb2 # type: ignore - -from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class RepositoryManagerRestInterceptor: - """Interceptor for RepositoryManager. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the RepositoryManagerRestTransport. - - .. code-block:: python - class MyCustomRepositoryManagerInterceptor(RepositoryManagerRestInterceptor): - def pre_batch_create_repositories(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_create_repositories(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_repository(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_repository(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_repository(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_repository(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_git_refs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_fetch_git_refs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_linkable_repositories(self, request, metadata): - logging.log(f"Received request: {request}") - return 
request, metadata - - def post_fetch_linkable_repositories(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_read_token(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_fetch_read_token(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_read_write_token(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_fetch_read_write_token(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_repository(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_repository(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_connections(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_connections(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_repositories(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_repositories(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_connection(self, response): - logging.log(f"Received response: {response}") - return response - - transport = RepositoryManagerRestTransport(interceptor=MyCustomRepositoryManagerInterceptor()) - client = RepositoryManagerClient(transport=transport) - - - 
""" - def pre_batch_create_repositories(self, request: repositories.BatchCreateRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.BatchCreateRepositoriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for batch_create_repositories - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_batch_create_repositories(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for batch_create_repositories - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_create_connection(self, request: repositories.CreateConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.CreateConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_create_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_connection - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_create_repository(self, request: repositories.CreateRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.CreateRepositoryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_repository - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. 
- """ - return request, metadata - - def post_create_repository(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_repository - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_delete_connection(self, request: repositories.DeleteConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.DeleteConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_delete_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_connection - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_delete_repository(self, request: repositories.DeleteRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.DeleteRepositoryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_repository - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_delete_repository(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_repository - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. 
- """ - return response - def pre_fetch_git_refs(self, request: repositories.FetchGitRefsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchGitRefsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for fetch_git_refs - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_fetch_git_refs(self, response: repositories.FetchGitRefsResponse) -> repositories.FetchGitRefsResponse: - """Post-rpc interceptor for fetch_git_refs - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_fetch_linkable_repositories(self, request: repositories.FetchLinkableRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchLinkableRepositoriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for fetch_linkable_repositories - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_fetch_linkable_repositories(self, response: repositories.FetchLinkableRepositoriesResponse) -> repositories.FetchLinkableRepositoriesResponse: - """Post-rpc interceptor for fetch_linkable_repositories - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_fetch_read_token(self, request: repositories.FetchReadTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchReadTokenRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for fetch_read_token - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. 
- """ - return request, metadata - - def post_fetch_read_token(self, response: repositories.FetchReadTokenResponse) -> repositories.FetchReadTokenResponse: - """Post-rpc interceptor for fetch_read_token - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_fetch_read_write_token(self, request: repositories.FetchReadWriteTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchReadWriteTokenRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for fetch_read_write_token - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_fetch_read_write_token(self, response: repositories.FetchReadWriteTokenResponse) -> repositories.FetchReadWriteTokenResponse: - """Post-rpc interceptor for fetch_read_write_token - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_get_connection(self, request: repositories.GetConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.GetConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_get_connection(self, response: repositories.Connection) -> repositories.Connection: - """Post-rpc interceptor for get_connection - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. 
- """ - return response - def pre_get_repository(self, request: repositories.GetRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.GetRepositoryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_repository - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_get_repository(self, response: repositories.Repository) -> repositories.Repository: - """Post-rpc interceptor for get_repository - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_list_connections(self, request: repositories.ListConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.ListConnectionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_connections - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_list_connections(self, response: repositories.ListConnectionsResponse) -> repositories.ListConnectionsResponse: - """Post-rpc interceptor for list_connections - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_list_repositories(self, request: repositories.ListRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.ListRepositoriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_repositories - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. 
- """ - return request, metadata - - def post_list_repositories(self, response: repositories.ListRepositoriesResponse) -> repositories.ListRepositoriesResponse: - """Post-rpc interceptor for list_repositories - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_update_connection(self, request: repositories.UpdateConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.UpdateConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_update_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_connection - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. 
- """ - return response - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. 
- """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class RepositoryManagerRestStub: - _session: AuthorizedSession - _host: str - _interceptor: RepositoryManagerRestInterceptor - - -class RepositoryManagerRestTransport(RepositoryManagerTransport): - """REST backend transport for RepositoryManager. - - Manages connections to source code repositories. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[RepositoryManagerRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or RepositoryManagerRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v2") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _BatchCreateRepositories(RepositoryManagerRestStub): - def __hash__(self): - return hash("BatchCreateRepositories") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.BatchCreateRepositoriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the batch create repositories method over HTTP. - - Args: - request (~.repositories.BatchCreateRepositoriesRequest): - The request object. Message for creating repositoritories - in batch. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories:batchCreate', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_batch_create_repositories(request, metadata) - pb_request = repositories.BatchCreateRepositoriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_batch_create_repositories(resp) - return resp - - class _CreateConnection(RepositoryManagerRestStub): - def __hash__(self): - return hash("CreateConnection") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "connectionId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.CreateConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create connection method over HTTP. - - Args: - request (~.repositories.CreateConnectionRequest): - The request object. Message for creating a Connection - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/connections', - 'body': 'connection', - }, - ] - request, metadata = self._interceptor.pre_create_connection(request, metadata) - pb_request = repositories.CreateConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_connection(resp) - return resp - - class _CreateRepository(RepositoryManagerRestStub): - def __hash__(self): - return hash("CreateRepository") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "repositoryId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.CreateRepositoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create repository method over HTTP. - - Args: - request (~.repositories.CreateRepositoryRequest): - The request object. Message for creating a Repository. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories', - 'body': 'repository', - }, - ] - request, metadata = self._interceptor.pre_create_repository(request, metadata) - pb_request = repositories.CreateRepositoryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_repository(resp) - return resp - - class _DeleteConnection(RepositoryManagerRestStub): - def __hash__(self): - return hash("DeleteConnection") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.DeleteConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete connection method over HTTP. - - Args: - request (~.repositories.DeleteConnectionRequest): - The request object. Message for deleting a Connection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/connections/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_connection(request, metadata) - pb_request = repositories.DeleteConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_connection(resp) - return resp - - class _DeleteRepository(RepositoryManagerRestStub): - def __hash__(self): - return hash("DeleteRepository") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.DeleteRepositoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete repository method over HTTP. - - Args: - request (~.repositories.DeleteRepositoryRequest): - The request object. Message for deleting a Repository. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/connections/*/repositories/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_repository(request, metadata) - pb_request = repositories.DeleteRepositoryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_repository(resp) - return resp - - class _FetchGitRefs(RepositoryManagerRestStub): - def __hash__(self): - return hash("FetchGitRefs") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.FetchGitRefsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.FetchGitRefsResponse: - r"""Call the fetch git refs method over HTTP. - - Args: - request (~.repositories.FetchGitRefsRequest): - The request object. Request for fetching git refs - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - ~.repositories.FetchGitRefsResponse: - Response for fetching git refs - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:fetchGitRefs', - }, - ] - request, metadata = self._interceptor.pre_fetch_git_refs(request, metadata) - pb_request = repositories.FetchGitRefsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.FetchGitRefsResponse() - pb_resp = repositories.FetchGitRefsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_git_refs(resp) - return resp - - class _FetchLinkableRepositories(RepositoryManagerRestStub): - def __hash__(self): - return hash("FetchLinkableRepositories") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.FetchLinkableRepositoriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.FetchLinkableRepositoriesResponse: - r"""Call the fetch linkable - repositories method over HTTP. - - Args: - request (~.repositories.FetchLinkableRepositoriesRequest): - The request object. Request message for - FetchLinkableRepositories. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.FetchLinkableRepositoriesResponse: - Response message for - FetchLinkableRepositories. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{connection=projects/*/locations/*/connections/*}:fetchLinkableRepositories', - }, - ] - request, metadata = self._interceptor.pre_fetch_linkable_repositories(request, metadata) - pb_request = repositories.FetchLinkableRepositoriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.FetchLinkableRepositoriesResponse() - pb_resp = repositories.FetchLinkableRepositoriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_linkable_repositories(resp) - return resp - - class _FetchReadToken(RepositoryManagerRestStub): - def __hash__(self): - return hash("FetchReadToken") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.FetchReadTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.FetchReadTokenResponse: - r"""Call the fetch read token method over HTTP. - - Args: - request (~.repositories.FetchReadTokenRequest): - The request object. Message for fetching SCM read token. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.FetchReadTokenResponse: - Message for responding to get read - token. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadToken', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_fetch_read_token(request, metadata) - pb_request = repositories.FetchReadTokenRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.FetchReadTokenResponse() - pb_resp = repositories.FetchReadTokenResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_read_token(resp) - return resp - - class _FetchReadWriteToken(RepositoryManagerRestStub): - def __hash__(self): - return hash("FetchReadWriteToken") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.FetchReadWriteTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.FetchReadWriteTokenResponse: - r"""Call the fetch read write token method over HTTP. - - Args: - request (~.repositories.FetchReadWriteTokenRequest): - The request object. Message for fetching SCM read/write - token. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.FetchReadWriteTokenResponse: - Message for responding to get - read/write token. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadWriteToken', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_fetch_read_write_token(request, metadata) - pb_request = repositories.FetchReadWriteTokenRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.FetchReadWriteTokenResponse() - pb_resp = repositories.FetchReadWriteTokenResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_read_write_token(resp) - return resp - - class _GetConnection(RepositoryManagerRestStub): - def __hash__(self): - return hash("GetConnection") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.GetConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.Connection: - r"""Call the get connection method over HTTP. - - Args: - request (~.repositories.GetConnectionRequest): - The request object. Message for getting the details of a - Connection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.Connection: - A connection to a SCM like GitHub, - GitHub Enterprise, Bitbucket Server or - GitLab. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/connections/*}', - }, - ] - request, metadata = self._interceptor.pre_get_connection(request, metadata) - pb_request = repositories.GetConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.Connection() - pb_resp = repositories.Connection.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_connection(resp) - return resp - - class _GetRepository(RepositoryManagerRestStub): - def __hash__(self): - return hash("GetRepository") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.GetRepositoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.Repository: - r"""Call the get repository method over HTTP. - - Args: - request (~.repositories.GetRepositoryRequest): - The request object. Message for getting the details of a - Repository. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.Repository: - A repository associated to a parent - connection. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/connections/*/repositories/*}', - }, - ] - request, metadata = self._interceptor.pre_get_repository(request, metadata) - pb_request = repositories.GetRepositoryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.Repository() - pb_resp = repositories.Repository.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_repository(resp) - return resp - - class _ListConnections(RepositoryManagerRestStub): - def __hash__(self): - return hash("ListConnections") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.ListConnectionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.ListConnectionsResponse: - r"""Call the list connections method over HTTP. - - Args: - request (~.repositories.ListConnectionsRequest): - The request object. Message for requesting list of - Connections. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.ListConnectionsResponse: - Message for response to listing - Connections. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/connections', - }, - ] - request, metadata = self._interceptor.pre_list_connections(request, metadata) - pb_request = repositories.ListConnectionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.ListConnectionsResponse() - pb_resp = repositories.ListConnectionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_connections(resp) - return resp - - class _ListRepositories(RepositoryManagerRestStub): - def __hash__(self): - return hash("ListRepositories") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.ListRepositoriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.ListRepositoriesResponse: - r"""Call the list repositories method over HTTP. - - Args: - request (~.repositories.ListRepositoriesRequest): - The request object. Message for requesting list of - Repositories. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.ListRepositoriesResponse: - Message for response to listing - Repositories. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories', - }, - ] - request, metadata = self._interceptor.pre_list_repositories(request, metadata) - pb_request = repositories.ListRepositoriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.ListRepositoriesResponse() - pb_resp = repositories.ListRepositoriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_repositories(resp) - return resp - - class _UpdateConnection(RepositoryManagerRestStub): - def __hash__(self): - return hash("UpdateConnection") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.UpdateConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the update connection method over HTTP. - - Args: - request (~.repositories.UpdateConnectionRequest): - The request object. Message for updating a Connection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{connection.name=projects/*/locations/*/connections/*}', - 'body': 'connection', - }, - ] - request, metadata = self._interceptor.pre_update_connection(request, metadata) - pb_request = repositories.UpdateConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_connection(resp) - return resp - - @property - def batch_create_repositories(self) -> Callable[ - [repositories.BatchCreateRepositoriesRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._BatchCreateRepositories(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_connection(self) -> Callable[ - [repositories.CreateConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_repository(self) -> Callable[ - [repositories.CreateRepositoryRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateRepository(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_connection(self) -> Callable[ - [repositories.DeleteConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_repository(self) -> Callable[ - [repositories.DeleteRepositoryRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteRepository(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_git_refs(self) -> Callable[ - [repositories.FetchGitRefsRequest], - repositories.FetchGitRefsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._FetchGitRefs(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_linkable_repositories(self) -> Callable[ - [repositories.FetchLinkableRepositoriesRequest], - repositories.FetchLinkableRepositoriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FetchLinkableRepositories(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_read_token(self) -> Callable[ - [repositories.FetchReadTokenRequest], - repositories.FetchReadTokenResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FetchReadToken(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_read_write_token(self) -> Callable[ - [repositories.FetchReadWriteTokenRequest], - repositories.FetchReadWriteTokenResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FetchReadWriteToken(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_connection(self) -> Callable[ - [repositories.GetConnectionRequest], - repositories.Connection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_repository(self) -> Callable[ - [repositories.GetRepositoryRequest], - repositories.Repository]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetRepository(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_connections(self) -> Callable[ - [repositories.ListConnectionsRequest], - repositories.ListConnectionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_repositories(self) -> Callable[ - [repositories.ListRepositoriesRequest], - repositories.ListRepositoriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListRepositories(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_connection(self) -> Callable[ - [repositories.UpdateConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(RepositoryManagerRestStub): - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:getIamPolicy', - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(RepositoryManagerRestStub): - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:setIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(RepositoryManagerRestStub): - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:testIamPermissions', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(RepositoryManagerRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(RepositoryManagerRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'RepositoryManagerRestTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py deleted file mode 100644 index 1df6a863..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .cloudbuild import ( - OperationMetadata, - RunWorkflowCustomOperationMetadata, -) -from .repositories import ( - BatchCreateRepositoriesRequest, - BatchCreateRepositoriesResponse, - Connection, - CreateConnectionRequest, - CreateRepositoryRequest, - DeleteConnectionRequest, - DeleteRepositoryRequest, - FetchGitRefsRequest, - FetchGitRefsResponse, - FetchLinkableRepositoriesRequest, - FetchLinkableRepositoriesResponse, - FetchReadTokenRequest, - FetchReadTokenResponse, - FetchReadWriteTokenRequest, - FetchReadWriteTokenResponse, - GetConnectionRequest, - GetRepositoryRequest, - GitHubConfig, - GitHubEnterpriseConfig, - GitLabConfig, - InstallationState, - ListConnectionsRequest, - ListConnectionsResponse, - ListRepositoriesRequest, - ListRepositoriesResponse, - OAuthCredential, - ProcessWebhookRequest, - Repository, - ServiceDirectoryConfig, - UpdateConnectionRequest, - UserCredential, -) - -__all__ = ( - 'OperationMetadata', - 'RunWorkflowCustomOperationMetadata', - 'BatchCreateRepositoriesRequest', - 'BatchCreateRepositoriesResponse', - 'Connection', - 'CreateConnectionRequest', - 'CreateRepositoryRequest', - 'DeleteConnectionRequest', - 'DeleteRepositoryRequest', - 'FetchGitRefsRequest', - 'FetchGitRefsResponse', - 'FetchLinkableRepositoriesRequest', - 'FetchLinkableRepositoriesResponse', - 'FetchReadTokenRequest', - 'FetchReadTokenResponse', - 'FetchReadWriteTokenRequest', - 'FetchReadWriteTokenResponse', - 'GetConnectionRequest', - 'GetRepositoryRequest', - 'GitHubConfig', - 'GitHubEnterpriseConfig', - 'GitLabConfig', - 'InstallationState', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'ListRepositoriesRequest', - 'ListRepositoriesResponse', - 'OAuthCredential', - 'ProcessWebhookRequest', - 'Repository', - 'ServiceDirectoryConfig', - 'UpdateConnectionRequest', - 'UserCredential', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py 
b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py deleted file mode 100644 index a016f0af..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py +++ /dev/null @@ -1,159 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.devtools.cloudbuild.v2', - manifest={ - 'OperationMetadata', - 'RunWorkflowCustomOperationMetadata', - }, -) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. 
Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class RunWorkflowCustomOperationMetadata(proto.Message): - r"""Represents the custom metadata of the RunWorkflow - long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - verb (str): - Output only. Name of the verb executed by the - operation. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - target (str): - Output only. Server-defined resource path for - the target of the operation. - pipeline_run_id (str): - Output only. ID of the pipeline run created - by RunWorkflow. 
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - verb: str = proto.Field( - proto.STRING, - number=3, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=4, - ) - api_version: str = proto.Field( - proto.STRING, - number=5, - ) - target: str = proto.Field( - proto.STRING, - number=6, - ) - pipeline_run_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py deleted file mode 100644 index 6d5e147a..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py +++ /dev/null @@ -1,1104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.api import httpbody_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.devtools.cloudbuild.v2', - manifest={ - 'Connection', - 'InstallationState', - 'FetchLinkableRepositoriesRequest', - 'FetchLinkableRepositoriesResponse', - 'GitHubConfig', - 'GitHubEnterpriseConfig', - 'GitLabConfig', - 'ServiceDirectoryConfig', - 'Repository', - 'OAuthCredential', - 'UserCredential', - 'CreateConnectionRequest', - 'GetConnectionRequest', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'UpdateConnectionRequest', - 'DeleteConnectionRequest', - 'CreateRepositoryRequest', - 'BatchCreateRepositoriesRequest', - 'BatchCreateRepositoriesResponse', - 'GetRepositoryRequest', - 'ListRepositoriesRequest', - 'ListRepositoriesResponse', - 'DeleteRepositoryRequest', - 'FetchReadWriteTokenRequest', - 'FetchReadTokenRequest', - 'FetchReadTokenResponse', - 'FetchReadWriteTokenResponse', - 'ProcessWebhookRequest', - 'FetchGitRefsRequest', - 'FetchGitRefsResponse', - }, -) - - -class Connection(proto.Message): - r"""A connection to a SCM like GitHub, GitHub Enterprise, - Bitbucket Server or GitLab. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Immutable. The resource name of the connection, in the - format - ``projects/{project}/locations/{location}/connections/{connection_id}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
Server assigned timestamp for - when the connection was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Server assigned timestamp for - when the connection was updated. - github_config (google.cloud.devtools.cloudbuild_v2.types.GitHubConfig): - Configuration for connections to github.com. - - This field is a member of `oneof`_ ``connection_config``. - github_enterprise_config (google.cloud.devtools.cloudbuild_v2.types.GitHubEnterpriseConfig): - Configuration for connections to an instance - of GitHub Enterprise. - - This field is a member of `oneof`_ ``connection_config``. - gitlab_config (google.cloud.devtools.cloudbuild_v2.types.GitLabConfig): - Configuration for connections to gitlab.com - or an instance of GitLab Enterprise. - - This field is a member of `oneof`_ ``connection_config``. - installation_state (google.cloud.devtools.cloudbuild_v2.types.InstallationState): - Output only. Installation state of the - Connection. - disabled (bool): - If disabled is set to true, functionality is - disabled for this connection. Repository based - API methods and webhooks processing for - repositories in this connection will be - disabled. - reconciling (bool): - Output only. Set to true when the connection - is being set up or updated in the background. - annotations (MutableMapping[str, str]): - Allows clients to store small amounts of - arbitrary data. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - github_config: 'GitHubConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='connection_config', - message='GitHubConfig', - ) - github_enterprise_config: 'GitHubEnterpriseConfig' = proto.Field( - proto.MESSAGE, - number=6, - oneof='connection_config', - message='GitHubEnterpriseConfig', - ) - gitlab_config: 'GitLabConfig' = proto.Field( - proto.MESSAGE, - number=7, - oneof='connection_config', - message='GitLabConfig', - ) - installation_state: 'InstallationState' = proto.Field( - proto.MESSAGE, - number=12, - message='InstallationState', - ) - disabled: bool = proto.Field( - proto.BOOL, - number=13, - ) - reconciling: bool = proto.Field( - proto.BOOL, - number=14, - ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=15, - ) - etag: str = proto.Field( - proto.STRING, - number=16, - ) - - -class InstallationState(proto.Message): - r"""Describes stage and necessary actions to be taken by the - user to complete the installation. Used for GitHub and GitHub - Enterprise based connections. - - Attributes: - stage (google.cloud.devtools.cloudbuild_v2.types.InstallationState.Stage): - Output only. Current step of the installation - process. - message (str): - Output only. Message of what the user should - do next to continue the installation. Empty - string if the installation is already complete. - action_uri (str): - Output only. Link to follow for next action. - Empty string if the installation is already - complete. - """ - class Stage(proto.Enum): - r"""Stage of the installation process. - - Values: - STAGE_UNSPECIFIED (0): - No stage specified. - PENDING_CREATE_APP (1): - Only for GitHub Enterprise. 
An App creation - has been requested. The user needs to confirm - the creation in their GitHub enterprise host. - PENDING_USER_OAUTH (2): - User needs to authorize the GitHub (or - Enterprise) App via OAuth. - PENDING_INSTALL_APP (3): - User needs to follow the link to install the - GitHub (or Enterprise) App. - COMPLETE (10): - Installation process has been completed. - """ - STAGE_UNSPECIFIED = 0 - PENDING_CREATE_APP = 1 - PENDING_USER_OAUTH = 2 - PENDING_INSTALL_APP = 3 - COMPLETE = 10 - - stage: Stage = proto.Field( - proto.ENUM, - number=1, - enum=Stage, - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - action_uri: str = proto.Field( - proto.STRING, - number=3, - ) - - -class FetchLinkableRepositoriesRequest(proto.Message): - r"""Request message for FetchLinkableRepositories. - - Attributes: - connection (str): - Required. The name of the Connection. Format: - ``projects/*/locations/*/connections/*``. - page_size (int): - Number of results to return in the list. - Default to 20. - page_token (str): - Page start. - """ - - connection: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class FetchLinkableRepositoriesResponse(proto.Message): - r"""Response message for FetchLinkableRepositories. - - Attributes: - repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): - repositories ready to be created. - next_page_token (str): - A token identifying a page of results the - server should return. - """ - - @property - def raw_page(self): - return self - - repositories: MutableSequence['Repository'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Repository', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GitHubConfig(proto.Message): - r"""Configuration for connections to github.com. 
- - Attributes: - authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.OAuthCredential): - OAuth credential of the account that - authorized the Cloud Build GitHub App. It is - recommended to use a robot account instead of a - human user account. The OAuth token must be tied - to the Cloud Build GitHub App. - app_installation_id (int): - GitHub App installation id. - """ - - authorizer_credential: 'OAuthCredential' = proto.Field( - proto.MESSAGE, - number=1, - message='OAuthCredential', - ) - app_installation_id: int = proto.Field( - proto.INT64, - number=2, - ) - - -class GitHubEnterpriseConfig(proto.Message): - r"""Configuration for connections to an instance of GitHub - Enterprise. - - Attributes: - host_uri (str): - Required. The URI of the GitHub Enterprise - host this connection is for. - api_key (str): - Required. API Key used for authentication of - webhook events. - app_id (int): - Id of the GitHub App created from the - manifest. - app_slug (str): - The URL-friendly name of the GitHub App. - private_key_secret_version (str): - SecretManager resource containing the private key of the - GitHub App, formatted as - ``projects/*/secrets/*/versions/*``. - webhook_secret_secret_version (str): - SecretManager resource containing the webhook secret of the - GitHub App, formatted as - ``projects/*/secrets/*/versions/*``. - app_installation_id (int): - ID of the installation of the GitHub App. - service_directory_config (google.cloud.devtools.cloudbuild_v2.types.ServiceDirectoryConfig): - Configuration for using Service Directory to - privately connect to a GitHub Enterprise server. - This should only be set if the GitHub Enterprise - server is hosted on-premises and not reachable - by public internet. If this field is left empty, - calls to the GitHub Enterprise server will be - made over the public internet. - ssl_ca (str): - SSL certificate to use for requests to GitHub - Enterprise. - server_version (str): - Output only. 
GitHub Enterprise version installed at the - host_uri. - """ - - host_uri: str = proto.Field( - proto.STRING, - number=1, - ) - api_key: str = proto.Field( - proto.STRING, - number=12, - ) - app_id: int = proto.Field( - proto.INT64, - number=2, - ) - app_slug: str = proto.Field( - proto.STRING, - number=13, - ) - private_key_secret_version: str = proto.Field( - proto.STRING, - number=4, - ) - webhook_secret_secret_version: str = proto.Field( - proto.STRING, - number=5, - ) - app_installation_id: int = proto.Field( - proto.INT64, - number=9, - ) - service_directory_config: 'ServiceDirectoryConfig' = proto.Field( - proto.MESSAGE, - number=10, - message='ServiceDirectoryConfig', - ) - ssl_ca: str = proto.Field( - proto.STRING, - number=11, - ) - server_version: str = proto.Field( - proto.STRING, - number=14, - ) - - -class GitLabConfig(proto.Message): - r"""Configuration for connections to gitlab.com or an instance of - GitLab Enterprise. - - Attributes: - host_uri (str): - The URI of the GitLab Enterprise host this - connection is for. If not specified, the default - value is https://gitlab.com. - webhook_secret_secret_version (str): - Required. Immutable. SecretManager resource containing the - webhook secret of a GitLab Enterprise project, formatted as - ``projects/*/secrets/*/versions/*``. - read_authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.UserCredential): - Required. A GitLab personal access token with the minimum - ``read_api`` scope access. - authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.UserCredential): - Required. A GitLab personal access token with the ``api`` - scope access. - service_directory_config (google.cloud.devtools.cloudbuild_v2.types.ServiceDirectoryConfig): - Configuration for using Service Directory to - privately connect to a GitLab Enterprise server. - This should only be set if the GitLab Enterprise - server is hosted on-premises and not reachable - by public internet. 
If this field is left empty, - calls to the GitLab Enterprise server will be - made over the public internet. - ssl_ca (str): - SSL certificate to use for requests to GitLab - Enterprise. - server_version (str): - Output only. Version of the GitLab Enterprise server running - on the ``host_uri``. - """ - - host_uri: str = proto.Field( - proto.STRING, - number=1, - ) - webhook_secret_secret_version: str = proto.Field( - proto.STRING, - number=2, - ) - read_authorizer_credential: 'UserCredential' = proto.Field( - proto.MESSAGE, - number=3, - message='UserCredential', - ) - authorizer_credential: 'UserCredential' = proto.Field( - proto.MESSAGE, - number=4, - message='UserCredential', - ) - service_directory_config: 'ServiceDirectoryConfig' = proto.Field( - proto.MESSAGE, - number=5, - message='ServiceDirectoryConfig', - ) - ssl_ca: str = proto.Field( - proto.STRING, - number=6, - ) - server_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ServiceDirectoryConfig(proto.Message): - r"""ServiceDirectoryConfig represents Service Directory - configuration for a connection. - - Attributes: - service (str): - Required. The Service Directory service name. - Format: - projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}. - """ - - service: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Repository(proto.Message): - r"""A repository associated to a parent connection. - - Attributes: - name (str): - Immutable. Resource name of the repository, in the format - ``projects/*/locations/*/connections/*/repositories/*``. - remote_uri (str): - Required. Git Clone HTTPS URI. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Server assigned timestamp for - when the connection was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Server assigned timestamp for - when the connection was updated. 
- annotations (MutableMapping[str, str]): - Allows clients to store small amounts of - arbitrary data. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. - webhook_id (str): - Output only. External ID of the webhook - created for the repository. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - remote_uri: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - etag: str = proto.Field( - proto.STRING, - number=7, - ) - webhook_id: str = proto.Field( - proto.STRING, - number=8, - ) - - -class OAuthCredential(proto.Message): - r"""Represents an OAuth token of the account that authorized the - Connection, and associated metadata. - - Attributes: - oauth_token_secret_version (str): - A SecretManager resource containing the OAuth token that - authorizes the Cloud Build connection. Format: - ``projects/*/secrets/*/versions/*``. - username (str): - Output only. The username associated to this - token. - """ - - oauth_token_secret_version: str = proto.Field( - proto.STRING, - number=1, - ) - username: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UserCredential(proto.Message): - r"""Represents a personal access token that authorized the - Connection, and associated metadata. - - Attributes: - user_token_secret_version (str): - Required. A SecretManager resource containing the user token - that authorizes the Cloud Build connection. Format: - ``projects/*/secrets/*/versions/*``. - username (str): - Output only. 
The username associated to this - token. - """ - - user_token_secret_version: str = proto.Field( - proto.STRING, - number=1, - ) - username: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateConnectionRequest(proto.Message): - r"""Message for creating a Connection - - Attributes: - parent (str): - Required. Project and location where the connection will be - created. Format: ``projects/*/locations/*``. - connection (google.cloud.devtools.cloudbuild_v2.types.Connection): - Required. The Connection to create. - connection_id (str): - Required. The ID to use for the Connection, which will - become the final component of the Connection's resource - name. Names must be unique per-project per-location. Allows - alphanumeric characters and any of -._~%!$&'()*+,;=@. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - connection: 'Connection' = proto.Field( - proto.MESSAGE, - number=2, - message='Connection', - ) - connection_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetConnectionRequest(proto.Message): - r"""Message for getting the details of a Connection. - - Attributes: - name (str): - Required. The name of the Connection to retrieve. Format: - ``projects/*/locations/*/connections/*``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListConnectionsRequest(proto.Message): - r"""Message for requesting list of Connections. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - Connections. Format: ``projects/*/locations/*``. - page_size (int): - Number of results to return in the list. - page_token (str): - Page start. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListConnectionsResponse(proto.Message): - r"""Message for response to listing Connections. 
- - Attributes: - connections (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Connection]): - The list of Connections. - next_page_token (str): - A token identifying a page of results the - server should return. - """ - - @property - def raw_page(self): - return self - - connections: MutableSequence['Connection'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Connection', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateConnectionRequest(proto.Message): - r"""Message for updating a Connection. - - Attributes: - connection (google.cloud.devtools.cloudbuild_v2.types.Connection): - Required. The Connection to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - allow_missing (bool): - If set to true, and the connection is not found a new - connection will be created. In this situation - ``update_mask`` is ignored. The creation will succeed only - if the input connection has all the necessary information - (e.g a github_config with both user_oauth_token and - installation_id properties). - etag (str): - The current etag of the connection. - If an etag is provided and does not match the - current etag of the connection, update will be - blocked and an ABORTED error will be returned. - """ - - connection: 'Connection' = proto.Field( - proto.MESSAGE, - number=1, - message='Connection', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=3, - ) - etag: str = proto.Field( - proto.STRING, - number=4, - ) - - -class DeleteConnectionRequest(proto.Message): - r"""Message for deleting a Connection. - - Attributes: - name (str): - Required. The name of the Connection to delete. Format: - ``projects/*/locations/*/connections/*``. - etag (str): - The current etag of the connection. 
- If an etag is provided and does not match the - current etag of the connection, deletion will be - blocked and an ABORTED error will be returned. - validate_only (bool): - If set, validate the request, but do not - actually post it. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class CreateRepositoryRequest(proto.Message): - r"""Message for creating a Repository. - - Attributes: - parent (str): - Required. The connection to contain the - repository. If the request is part of a - BatchCreateRepositoriesRequest, this field - should be empty or match the parent specified - there. - repository (google.cloud.devtools.cloudbuild_v2.types.Repository): - Required. The repository to create. - repository_id (str): - Required. The ID to use for the repository, which will - become the final component of the repository's resource - name. This ID should be unique in the connection. Allows - alphanumeric characters and any of -._~%!$&'()*+,;=@. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - repository: 'Repository' = proto.Field( - proto.MESSAGE, - number=2, - message='Repository', - ) - repository_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BatchCreateRepositoriesRequest(proto.Message): - r"""Message for creating repositoritories in batch. - - Attributes: - parent (str): - Required. The connection to contain all the repositories - being created. Format: - projects/\ */locations/*/connections/\* The parent field in - the CreateRepositoryRequest messages must either be empty or - match this field. - requests (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]): - Required. The request messages specifying the - repositories to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - requests: MutableSequence['CreateRepositoryRequest'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='CreateRepositoryRequest', - ) - - -class BatchCreateRepositoriesResponse(proto.Message): - r"""Message for response of creating repositories in batch. - - Attributes: - repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): - Repository resources created. - """ - - repositories: MutableSequence['Repository'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Repository', - ) - - -class GetRepositoryRequest(proto.Message): - r"""Message for getting the details of a Repository. - - Attributes: - name (str): - Required. The name of the Repository to retrieve. Format: - ``projects/*/locations/*/connections/*/repositories/*``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListRepositoriesRequest(proto.Message): - r"""Message for requesting list of Repositories. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - Repositories. Format: - ``projects/*/locations/*/connections/*``. - page_size (int): - Number of results to return in the list. - page_token (str): - Page start. - filter (str): - A filter expression that filters resources listed in the - response. Expressions must follow API improvement proposal - `AIP-160 `__. e.g. - ``remote_uri:"https://github.com*"``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListRepositoriesResponse(proto.Message): - r"""Message for response to listing Repositories. - - Attributes: - repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): - The list of Repositories. 
- next_page_token (str): - A token identifying a page of results the - server should return. - """ - - @property - def raw_page(self): - return self - - repositories: MutableSequence['Repository'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Repository', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteRepositoryRequest(proto.Message): - r"""Message for deleting a Repository. - - Attributes: - name (str): - Required. The name of the Repository to delete. Format: - ``projects/*/locations/*/connections/*/repositories/*``. - etag (str): - The current etag of the repository. - If an etag is provided and does not match the - current etag of the repository, deletion will be - blocked and an ABORTED error will be returned. - validate_only (bool): - If set, validate the request, but do not - actually post it. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class FetchReadWriteTokenRequest(proto.Message): - r"""Message for fetching SCM read/write token. - - Attributes: - repository (str): - Required. The resource name of the repository in the format - ``projects/*/locations/*/connections/*/repositories/*``. - """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FetchReadTokenRequest(proto.Message): - r"""Message for fetching SCM read token. - - Attributes: - repository (str): - Required. The resource name of the repository in the format - ``projects/*/locations/*/connections/*/repositories/*``. - """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FetchReadTokenResponse(proto.Message): - r"""Message for responding to get read token. - - Attributes: - token (str): - The token content. - expiration_time (google.protobuf.timestamp_pb2.Timestamp): - Expiration timestamp. Can be empty if unknown - or non-expiring. 
- """ - - token: str = proto.Field( - proto.STRING, - number=1, - ) - expiration_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class FetchReadWriteTokenResponse(proto.Message): - r"""Message for responding to get read/write token. - - Attributes: - token (str): - The token content. - expiration_time (google.protobuf.timestamp_pb2.Timestamp): - Expiration timestamp. Can be empty if unknown - or non-expiring. - """ - - token: str = proto.Field( - proto.STRING, - number=1, - ) - expiration_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class ProcessWebhookRequest(proto.Message): - r"""RPC request object accepted by the ProcessWebhook RPC method. - - Attributes: - parent (str): - Required. Project and location where the webhook will be - received. Format: ``projects/*/locations/*``. - body (google.api.httpbody_pb2.HttpBody): - HTTP request body. - webhook_key (str): - Arbitrary additional key to find the maching - repository for a webhook event if needed. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - body: httpbody_pb2.HttpBody = proto.Field( - proto.MESSAGE, - number=2, - message=httpbody_pb2.HttpBody, - ) - webhook_key: str = proto.Field( - proto.STRING, - number=3, - ) - - -class FetchGitRefsRequest(proto.Message): - r"""Request for fetching git refs - - Attributes: - repository (str): - Required. The resource name of the repository in the format - ``projects/*/locations/*/connections/*/repositories/*``. - ref_type (google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest.RefType): - Type of refs to fetch - """ - class RefType(proto.Enum): - r"""Type of refs - - Values: - REF_TYPE_UNSPECIFIED (0): - No type specified. - TAG (1): - To fetch tags. - BRANCH (2): - To fetch branches. 
- """ - REF_TYPE_UNSPECIFIED = 0 - TAG = 1 - BRANCH = 2 - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - ref_type: RefType = proto.Field( - proto.ENUM, - number=2, - enum=RefType, - ) - - -class FetchGitRefsResponse(proto.Message): - r"""Response for fetching git refs - - Attributes: - ref_names (MutableSequence[str]): - Name of the refs fetched. - """ - - ref_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py deleted file mode 100644 index 89095013..00000000 --- a/owl-bot-staging/v2/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/devtools/cloudbuild_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py deleted file mode 100644 index fa9a6929..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCreateRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_batch_create_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - requests = cloudbuild_v2.CreateRepositoryRequest() - requests.parent = "parent_value" - requests.repository.remote_uri = "remote_uri_value" - requests.repository_id = "repository_id_value" - - request = cloudbuild_v2.BatchCreateRepositoriesRequest( - parent="parent_value", - requests=requests, - ) - - # Make the request - operation = client.batch_create_repositories(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py deleted file mode 100644 index cad8baae..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 
(the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCreateRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_batch_create_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - requests = cloudbuild_v2.CreateRepositoryRequest() - requests.parent = "parent_value" - requests.repository.remote_uri = "remote_uri_value" - requests.repository_id = "repository_id_value" - - request = cloudbuild_v2.BatchCreateRepositoriesRequest( - parent="parent_value", - requests=requests, - ) - - # Make the request - operation = client.batch_create_repositories(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py deleted file mode 100644 index 066f3245..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_CreateConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_create_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.CreateConnectionRequest( - parent="parent_value", - connection_id="connection_id_value", - ) - - # Make the request - operation = client.create_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_CreateConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py deleted file mode 100644 index d393a554..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 
-*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_create_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.CreateConnectionRequest( - parent="parent_value", - connection_id="connection_id_value", - ) - - # Make the request - operation = client.create_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py deleted file mode 100644 index 52aaa857..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_CreateRepository_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_create_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - repository = cloudbuild_v2.Repository() - repository.remote_uri = "remote_uri_value" - - request = cloudbuild_v2.CreateRepositoryRequest( - parent="parent_value", - repository=repository, - repository_id="repository_id_value", - ) - - # Make the request - operation = client.create_repository(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_CreateRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py deleted file mode 100644 index eb9a5e29..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in 
compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_create_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - repository = cloudbuild_v2.Repository() - repository.remote_uri = "remote_uri_value" - - request = cloudbuild_v2.CreateRepositoryRequest( - parent="parent_value", - repository=repository, - repository_id="repository_id_value", - ) - - # Make the request - operation = client.create_repository(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py deleted file mode 100644 index ef37e513..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_delete_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py deleted file mode 100644 index a18ff650..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_delete_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py deleted file mode 100644 index 58a5dac2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_delete_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteRepositoryRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_repository(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py deleted file mode 100644 index f141cb54..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_delete_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteRepositoryRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_repository(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py deleted file mode 100644 index 2c639684..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchGitRefs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_fetch_git_refs(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchGitRefsRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_git_refs(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py deleted file mode 100644 index fde064f0..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchGitRefs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_fetch_git_refs(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchGitRefsRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_git_refs(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py deleted file mode 100644 index c6c744fc..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchLinkableRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_fetch_linkable_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchLinkableRepositoriesRequest( - connection="connection_value", - ) - - # Make the request - page_result = client.fetch_linkable_repositories(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py deleted file mode 100644 index 9d422598..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchLinkableRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_fetch_linkable_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchLinkableRepositoriesRequest( - connection="connection_value", - ) - - # Make the request - page_result = client.fetch_linkable_repositories(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py deleted file mode 100644 index b110edb9..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchReadToken -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_fetch_read_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadTokenRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_read_token(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py deleted file mode 100644 index 08680d32..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchReadToken -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_fetch_read_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadTokenRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_read_token(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py deleted file mode 100644 index f2fab11e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchReadWriteToken -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_fetch_read_write_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadWriteTokenRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_read_write_token(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py deleted file mode 100644 index 64062425..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchReadWriteToken -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_fetch_read_write_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadWriteTokenRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_read_write_token(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py deleted file mode 100644 index cbce2c58..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_GetConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_get_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_GetConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py deleted file mode 100644 index 7da0f760..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_GetConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_get_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_GetConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py deleted file mode 100644 index 077cd120..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_GetRepository_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_get_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetRepositoryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_repository(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_GetRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py deleted file mode 100644 index 4f0bbd72..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_GetRepository_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_get_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetRepositoryRequest( - name="name_value", - ) - - # Make the request - response = client.get_repository(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_GetRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py deleted file mode 100644 index 78e39000..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_ListConnections_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_list_connections(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_ListConnections_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py deleted file mode 100644 index b0b6783d..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_ListConnections_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_list_connections(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_ListConnections_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py deleted file mode 100644 index 6140bd1a..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_ListRepositories_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_list_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListRepositoriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_repositories(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_ListRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py deleted file mode 100644 index b133c8eb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_list_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListRepositoriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_repositories(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py deleted file mode 100644 index 792d9cd7..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_update_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.UpdateConnectionRequest( - ) - - # Make the request - operation = client.update_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py deleted file mode 100644 index f1583940..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_update_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.UpdateConnectionRequest( - ) - - # Make the request - operation = client.update_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json deleted file mode 100644 index 818d3fc2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ /dev/null @@ -1,2309 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.devtools.cloudbuild.v2", - "version": "v2" - } - ], - "language": "PYTHON", - "name": "google-cloud-build", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.batch_create_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.BatchCreateRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "BatchCreateRepositories" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "requests", - "type": "MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_create_repositories" - }, - "description": "Sample for BatchCreateRepositories", - "file": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.batch_create_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.BatchCreateRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "BatchCreateRepositories" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "requests", - "type": "MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_create_repositories" - }, - "description": "Sample for BatchCreateRepositories", - "file": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.create_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "CreateConnection" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" - }, - { - "name": "connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_connection" - }, - "description": "Sample for CreateConnection", - "file": "cloudbuild_v2_generated_repository_manager_create_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateConnection_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_create_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.create_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "CreateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" - }, - { - "name": "connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_connection" - }, - "description": "Sample for CreateConnection", - "file": "cloudbuild_v2_generated_repository_manager_create_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_create_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.create_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "CreateRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "repository", - "type": 
"google.cloud.devtools.cloudbuild_v2.types.Repository" - }, - { - "name": "repository_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_repository" - }, - "description": "Sample for CreateRepository", - "file": "cloudbuild_v2_generated_repository_manager_create_repository_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateRepository_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_create_repository_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.create_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "CreateRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "repository", - "type": "google.cloud.devtools.cloudbuild_v2.types.Repository" - }, - { - "name": "repository_id", 
- "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_repository" - }, - "description": "Sample for CreateRepository", - "file": "cloudbuild_v2_generated_repository_manager_create_repository_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_create_repository_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.delete_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "DeleteConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.api_core.operation_async.AsyncOperation", - "shortName": "delete_connection" - }, - "description": "Sample for DeleteConnection", - "file": "cloudbuild_v2_generated_repository_manager_delete_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_delete_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.delete_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "DeleteConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_connection" - }, - "description": "Sample for DeleteConnection", - "file": "cloudbuild_v2_generated_repository_manager_delete_connection_sync.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_delete_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.delete_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "DeleteRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_repository" - }, - "description": "Sample for DeleteRepository", - "file": "cloudbuild_v2_generated_repository_manager_delete_repository_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, 
- "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_delete_repository_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.delete_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "DeleteRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_repository" - }, - "description": "Sample for DeleteRepository", - "file": "cloudbuild_v2_generated_repository_manager_delete_repository_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - 
"start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_delete_repository_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_git_refs", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchGitRefs", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchGitRefs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse", - "shortName": "fetch_git_refs" - }, - "description": "Sample for FetchGitRefs", - "file": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_git_refs", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchGitRefs", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchGitRefs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse", - "shortName": "fetch_git_refs" - }, - "description": "Sample for FetchGitRefs", - "file": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_linkable_repositories", - 
"method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchLinkableRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchLinkableRepositories" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesAsyncPager", - "shortName": "fetch_linkable_repositories" - }, - "description": "Sample for FetchLinkableRepositories", - "file": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_linkable_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchLinkableRepositories", - "service": { - "fullName": 
"google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchLinkableRepositories" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesPager", - "shortName": "fetch_linkable_repositories" - }, - "description": "Sample for FetchLinkableRepositories", - "file": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_read_token", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadToken", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchReadToken" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse", - "shortName": "fetch_read_token" - }, - "description": "Sample for FetchReadToken", - "file": "cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_read_token", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadToken", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchReadToken" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse", - "shortName": "fetch_read_token" - }, - "description": "Sample for FetchReadToken", - "file": "cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_read_write_token", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadWriteToken", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchReadWriteToken" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse", - "shortName": "fetch_read_write_token" - }, - "description": "Sample for FetchReadWriteToken", - "file": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_read_write_token", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadWriteToken", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchReadWriteToken" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse", - "shortName": "fetch_read_write_token" - }, - "description": "Sample for FetchReadWriteToken", - "file": 
"cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.get_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "GetConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.Connection", - "shortName": "get_connection" - }, - "description": "Sample for GetConnection", - "file": "cloudbuild_v2_generated_repository_manager_get_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetConnection_async", - 
"segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_get_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.get_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "GetConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.Connection", - "shortName": "get_connection" - }, - "description": "Sample for GetConnection", - "file": "cloudbuild_v2_generated_repository_manager_get_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetConnection_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, 
- { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_get_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.get_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "GetRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.Repository", - "shortName": "get_repository" - }, - "description": "Sample for GetRepository", - "file": "cloudbuild_v2_generated_repository_manager_get_repository_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetRepository_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_get_repository_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.get_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "GetRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.Repository", - "shortName": "get_repository" - }, - "description": "Sample for GetRepository", - "file": "cloudbuild_v2_generated_repository_manager_get_repository_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetRepository_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_get_repository_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": 
"google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.list_connections", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListConnections", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "ListConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsAsyncPager", - "shortName": "list_connections" - }, - "description": "Sample for ListConnections", - "file": "cloudbuild_v2_generated_repository_manager_list_connections_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListConnections_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_list_connections_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.list_connections", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListConnections", - "service": { - 
"fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "ListConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsPager", - "shortName": "list_connections" - }, - "description": "Sample for ListConnections", - "file": "cloudbuild_v2_generated_repository_manager_list_connections_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListConnections_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_list_connections_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.list_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "ListRepositories" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesAsyncPager", - "shortName": "list_repositories" - }, - "description": "Sample for ListRepositories", - "file": "cloudbuild_v2_generated_repository_manager_list_repositories_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListRepositories_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_list_repositories_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.list_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "ListRepositories" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesPager", - "shortName": "list_repositories" - }, - "description": "Sample for ListRepositories", - "file": "cloudbuild_v2_generated_repository_manager_list_repositories_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_list_repositories_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.update_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.UpdateConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "UpdateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest" - }, - { - "name": "connection", - "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_connection" - }, - "description": "Sample for UpdateConnection", - "file": "cloudbuild_v2_generated_repository_manager_update_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_update_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.update_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.UpdateConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "UpdateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest" - }, - { - "name": "connection", - "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_connection" - }, - "description": "Sample for UpdateConnection", - "file": "cloudbuild_v2_generated_repository_manager_update_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_update_connection_sync.py" - } - ] -} diff --git a/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py deleted file mode 100644 index 6df46861..00000000 --- a/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py +++ /dev/null @@ -1,189 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class cloudbuildCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_create_repositories': ('parent', 'requests', ), - 'create_connection': ('parent', 'connection', 'connection_id', ), - 'create_repository': ('parent', 'repository', 'repository_id', ), - 'delete_connection': ('name', 'etag', 'validate_only', ), - 'delete_repository': ('name', 'etag', 'validate_only', ), - 'fetch_git_refs': ('repository', 'ref_type', ), - 'fetch_linkable_repositories': ('connection', 'page_size', 'page_token', ), - 'fetch_read_token': ('repository', ), - 'fetch_read_write_token': ('repository', ), - 'get_connection': ('name', ), - 'get_repository': ('name', ), - 'list_connections': ('parent', 'page_size', 'page_token', ), - 'list_repositories': ('parent', 'page_size', 'page_token', 'filter', ), - 'update_connection': ('connection', 'update_mask', 'allow_missing', 'etag', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. 
- args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=cloudbuildCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. 
- with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the cloudbuild client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py deleted file mode 100644 index 525db897..00000000 --- a/owl-bot-staging/v2/setup.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 
2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-build' - - -description = "Google Cloud Build API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/devtools/cloudbuild/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", -] -url = "https://github.com/googleapis/python-build" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud", "google.cloud.devtools"] - -setuptools.setup( - name=name, - version=version, - description=description, - 
long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt deleted file mode 100644 index 2beecf99..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 -grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py deleted file mode 100644 index 786103fe..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py +++ /dev/null @@ -1,9596 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import 
operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import RepositoryManagerAsyncClient -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import RepositoryManagerClient -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import transports -from google.cloud.devtools.cloudbuild_v2.types import cloudbuild -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert RepositoryManagerClient._get_default_mtls_endpoint(None) is None - assert RepositoryManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert RepositoryManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert RepositoryManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert RepositoryManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert RepositoryManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (RepositoryManagerClient, "grpc"), - (RepositoryManagerAsyncClient, "grpc_asyncio"), - (RepositoryManagerClient, "rest"), -]) -def test_repository_manager_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudbuild.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.RepositoryManagerGrpcTransport, "grpc"), - 
(transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.RepositoryManagerRestTransport, "rest"), -]) -def test_repository_manager_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (RepositoryManagerClient, "grpc"), - (RepositoryManagerAsyncClient, "grpc_asyncio"), - (RepositoryManagerClient, "rest"), -]) -def test_repository_manager_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudbuild.googleapis.com' - ) - - -def test_repository_manager_client_get_transport_class(): - transport = RepositoryManagerClient.get_transport_class() - available_transports = [ - transports.RepositoryManagerGrpcTransport, - 
transports.RepositoryManagerRestTransport, - ] - assert transport in available_transports - - transport = RepositoryManagerClient.get_transport_class("grpc") - assert transport == transports.RepositoryManagerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), -]) -@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) -@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) -def test_repository_manager_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(RepositoryManagerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(RepositoryManagerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", "true"), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (RepositoryManagerClient, 
transports.RepositoryManagerGrpcTransport, "grpc", "false"), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", "true"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", "false"), -]) -@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) -@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_repository_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - RepositoryManagerClient, RepositoryManagerAsyncClient -]) -@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) -@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) -def test_repository_manager_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), -]) -def test_repository_manager_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", grpc_helpers), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", None), -]) -def test_repository_manager_client_client_options_credentials_file(client_class, 
transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_repository_manager_client_client_options_from_dict(): - with mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = RepositoryManagerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", grpc_helpers), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_repository_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.CreateConnectionRequest, - dict, -]) -def test_create_connection(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - client.create_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateConnectionRequest() - -@pytest.mark.asyncio -async def test_create_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.CreateConnectionRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_connection_async_from_dict(): - await test_create_connection_async(request_type=dict) - - -def test_create_connection_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.CreateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_connection_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = repositories.CreateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_connection_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_connection( - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection - mock_val = repositories.Connection(name='name_value') - assert arg == mock_val - arg = args[0].connection_id - mock_val = 'connection_id_value' - assert arg == mock_val - - -def test_create_connection_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_connection( - repositories.CreateConnectionRequest(), - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - -@pytest.mark.asyncio -async def test_create_connection_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_connection( - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection - mock_val = repositories.Connection(name='name_value') - assert arg == mock_val - arg = args[0].connection_id - mock_val = 'connection_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_connection_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_connection( - repositories.CreateConnectionRequest(), - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.GetConnectionRequest, - dict, -]) -def test_get_connection(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.Connection( - name='name_value', - disabled=True, - reconciling=True, - etag='etag_value', - ) - response = client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetConnectionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Connection) - assert response.name == 'name_value' - assert response.disabled is True - assert response.reconciling is True - assert response.etag == 'etag_value' - - -def test_get_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - client.get_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetConnectionRequest() - -@pytest.mark.asyncio -async def test_get_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.GetConnectionRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection( - name='name_value', - disabled=True, - reconciling=True, - etag='etag_value', - )) - response = await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetConnectionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Connection) - assert response.name == 'name_value' - assert response.disabled is True - assert response.reconciling is True - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_connection_async_from_dict(): - await test_get_connection_async(request_type=dict) - - -def test_get_connection_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.GetConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = repositories.Connection() - client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_connection_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.GetConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection()) - await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_connection_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.Connection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_connection_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_connection( - repositories.GetConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_connection_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = repositories.Connection() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_connection_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_connection( - repositories.GetConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.ListConnectionsRequest, - dict, -]) -def test_list_connections(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListConnectionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListConnectionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_connections_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - client.list_connections() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListConnectionsRequest() - -@pytest.mark.asyncio -async def test_list_connections_async(transport: str = 'grpc_asyncio', request_type=repositories.ListConnectionsRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListConnectionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_connections_async_from_dict(): - await test_list_connections_async(request_type=dict) - - -def test_list_connections_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.ListConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = repositories.ListConnectionsResponse() - client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_connections_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.ListConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse()) - await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_connections_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListConnectionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_connections_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_connections( - repositories.ListConnectionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_connections_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListConnectionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_connections_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_connections( - repositories.ListConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_connections_pager(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_connections(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Connection) - for i in results) -def test_list_connections_pages(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - RuntimeError, - ) - pages = list(client.list_connections(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_connections_async_pager(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_connections(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, repositories.Connection) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_connections_async_pages(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_connections(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - repositories.UpdateConnectionRequest, - dict, -]) -def test_update_connection(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.UpdateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - client.update_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.UpdateConnectionRequest() - -@pytest.mark.asyncio -async def test_update_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.UpdateConnectionRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.UpdateConnectionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_connection_async_from_dict(): - await test_update_connection_async(request_type=dict) - - -def test_update_connection_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.UpdateConnectionRequest() - - request.connection.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_connection_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.UpdateConnectionRequest() - - request.connection.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection.name=name_value', - ) in kw['metadata'] - - -def test_update_connection_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_connection( - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].connection - mock_val = repositories.Connection(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_connection_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_connection( - repositories.UpdateConnectionRequest(), - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_connection_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_connection( - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].connection - mock_val = repositories.Connection(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_connection_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_connection( - repositories.UpdateConnectionRequest(), - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.DeleteConnectionRequest, - dict, -]) -def test_delete_connection(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - client.delete_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteConnectionRequest() - -@pytest.mark.asyncio -async def test_delete_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.DeleteConnectionRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_connection_async_from_dict(): - await test_delete_connection_async(request_type=dict) - - -def test_delete_connection_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.DeleteConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_connection_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.DeleteConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_connection_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_connection_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_connection( - repositories.DeleteConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_connection_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_connection_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_connection( - repositories.DeleteConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.CreateRepositoryRequest, - dict, -]) -def test_create_repository(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateRepositoryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_repository_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - client.create_repository() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateRepositoryRequest() - -@pytest.mark.asyncio -async def test_create_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.CreateRepositoryRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateRepositoryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_repository_async_from_dict(): - await test_create_repository_async(request_type=dict) - - -def test_create_repository_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = repositories.CreateRepositoryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_repository_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.CreateRepositoryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_repository_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_repository( - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].repository - mock_val = repositories.Repository(name='name_value') - assert arg == mock_val - arg = args[0].repository_id - mock_val = 'repository_id_value' - assert arg == mock_val - - -def test_create_repository_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_repository( - repositories.CreateRepositoryRequest(), - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - -@pytest.mark.asyncio -async def test_create_repository_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_repository( - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].repository - mock_val = repositories.Repository(name='name_value') - assert arg == mock_val - arg = args[0].repository_id - mock_val = 'repository_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_repository_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_repository( - repositories.CreateRepositoryRequest(), - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.BatchCreateRepositoriesRequest, - dict, -]) -def test_batch_create_repositories(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.batch_create_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.BatchCreateRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_batch_create_repositories_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - client.batch_create_repositories() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.BatchCreateRepositoriesRequest() - -@pytest.mark.asyncio -async def test_batch_create_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.BatchCreateRepositoriesRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.batch_create_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.BatchCreateRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_batch_create_repositories_async_from_dict(): - await test_batch_create_repositories_async(request_type=dict) - - -def test_batch_create_repositories_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.BatchCreateRepositoriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.batch_create_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_create_repositories_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = repositories.BatchCreateRepositoriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.batch_create_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_batch_create_repositories_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.batch_create_repositories( - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].requests - mock_val = [repositories.CreateRepositoryRequest(parent='parent_value')] - assert arg == mock_val - - -def test_batch_create_repositories_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_create_repositories( - repositories.BatchCreateRepositoriesRequest(), - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - -@pytest.mark.asyncio -async def test_batch_create_repositories_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.batch_create_repositories( - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].requests - mock_val = [repositories.CreateRepositoryRequest(parent='parent_value')] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_batch_create_repositories_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_create_repositories( - repositories.BatchCreateRepositoriesRequest(), - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.GetRepositoryRequest, - dict, -]) -def test_get_repository(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.Repository( - name='name_value', - remote_uri='remote_uri_value', - etag='etag_value', - webhook_id='webhook_id_value', - ) - response = client.get_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetRepositoryRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Repository) - assert response.name == 'name_value' - assert response.remote_uri == 'remote_uri_value' - assert response.etag == 'etag_value' - assert response.webhook_id == 'webhook_id_value' - - -def test_get_repository_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - client.get_repository() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetRepositoryRequest() - -@pytest.mark.asyncio -async def test_get_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.GetRepositoryRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository( - name='name_value', - remote_uri='remote_uri_value', - etag='etag_value', - webhook_id='webhook_id_value', - )) - response = await client.get_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetRepositoryRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Repository) - assert response.name == 'name_value' - assert response.remote_uri == 'remote_uri_value' - assert response.etag == 'etag_value' - assert response.webhook_id == 'webhook_id_value' - - -@pytest.mark.asyncio -async def test_get_repository_async_from_dict(): - await test_get_repository_async(request_type=dict) - - -def test_get_repository_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.GetRepositoryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - call.return_value = repositories.Repository() - client.get_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_repository_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.GetRepositoryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository()) - await client.get_repository(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_repository_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.Repository() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_repository( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_repository_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_repository( - repositories.GetRepositoryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_repository_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = repositories.Repository() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_repository( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_repository_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_repository( - repositories.GetRepositoryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.ListRepositoriesRequest, - dict, -]) -def test_list_repositories(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListRepositoriesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_repositories(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRepositoriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_repositories_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - client.list_repositories() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListRepositoriesRequest() - -@pytest.mark.asyncio -async def test_list_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.ListRepositoriesRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_repositories(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRepositoriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_repositories_async_from_dict(): - await test_list_repositories_async(request_type=dict) - - -def test_list_repositories_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.ListRepositoriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - call.return_value = repositories.ListRepositoriesResponse() - client.list_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_repositories_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.ListRepositoriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse()) - await client.list_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_repositories_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListRepositoriesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_repositories( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_repositories_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_repositories( - repositories.ListRepositoriesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_repositories_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListRepositoriesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_repositories( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_repositories_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_repositories( - repositories.ListRepositoriesRequest(), - parent='parent_value', - ) - - -def test_list_repositories_pager(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_repositories(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Repository) - for i in results) -def test_list_repositories_pages(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - pages = list(client.list_repositories(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_repositories_async_pager(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_repositories(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, repositories.Repository) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_repositories_async_pages(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_repositories(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - repositories.DeleteRepositoryRequest, - dict, -]) -def test_delete_repository(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_repository(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteRepositoryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_repository_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - client.delete_repository() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteRepositoryRequest() - -@pytest.mark.asyncio -async def test_delete_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.DeleteRepositoryRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteRepositoryRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_repository_async_from_dict(): - await test_delete_repository_async(request_type=dict) - - -def test_delete_repository_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.DeleteRepositoryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_repository_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.DeleteRepositoryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_repository(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_repository_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_repository( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_repository_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_repository( - repositories.DeleteRepositoryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_repository_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_repository( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_repository_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_repository( - repositories.DeleteRepositoryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchReadWriteTokenRequest, - dict, -]) -def test_fetch_read_write_token(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadWriteTokenResponse( - token='token_value', - ) - response = client.fetch_read_write_token(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadWriteTokenRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchReadWriteTokenResponse) - assert response.token == 'token_value' - - -def test_fetch_read_write_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - client.fetch_read_write_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadWriteTokenRequest() - -@pytest.mark.asyncio -async def test_fetch_read_write_token_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchReadWriteTokenRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse( - token='token_value', - )) - response = await client.fetch_read_write_token(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadWriteTokenRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchReadWriteTokenResponse) - assert response.token == 'token_value' - - -@pytest.mark.asyncio -async def test_fetch_read_write_token_async_from_dict(): - await test_fetch_read_write_token_async(request_type=dict) - - -def test_fetch_read_write_token_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchReadWriteTokenRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - call.return_value = repositories.FetchReadWriteTokenResponse() - client.fetch_read_write_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_read_write_token_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchReadWriteTokenRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse()) - await client.fetch_read_write_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -def test_fetch_read_write_token_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadWriteTokenResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.fetch_read_write_token( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - - -def test_fetch_read_write_token_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.fetch_read_write_token( - repositories.FetchReadWriteTokenRequest(), - repository='repository_value', - ) - -@pytest.mark.asyncio -async def test_fetch_read_write_token_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadWriteTokenResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.fetch_read_write_token( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_fetch_read_write_token_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.fetch_read_write_token( - repositories.FetchReadWriteTokenRequest(), - repository='repository_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchReadTokenRequest, - dict, -]) -def test_fetch_read_token(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadTokenResponse( - token='token_value', - ) - response = client.fetch_read_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadTokenRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchReadTokenResponse) - assert response.token == 'token_value' - - -def test_fetch_read_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - client.fetch_read_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadTokenRequest() - -@pytest.mark.asyncio -async def test_fetch_read_token_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchReadTokenRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse( - token='token_value', - )) - response = await client.fetch_read_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadTokenRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchReadTokenResponse) - assert response.token == 'token_value' - - -@pytest.mark.asyncio -async def test_fetch_read_token_async_from_dict(): - await test_fetch_read_token_async(request_type=dict) - - -def test_fetch_read_token_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchReadTokenRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - call.return_value = repositories.FetchReadTokenResponse() - client.fetch_read_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_read_token_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchReadTokenRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse()) - await client.fetch_read_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -def test_fetch_read_token_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = repositories.FetchReadTokenResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.fetch_read_token( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - - -def test_fetch_read_token_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_read_token( - repositories.FetchReadTokenRequest(), - repository='repository_value', - ) - -@pytest.mark.asyncio -async def test_fetch_read_token_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadTokenResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.fetch_read_token( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_fetch_read_token_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.fetch_read_token( - repositories.FetchReadTokenRequest(), - repository='repository_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchLinkableRepositoriesRequest, - dict, -]) -def test_fetch_linkable_repositories(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchLinkableRepositoriesResponse( - next_page_token='next_page_token_value', - ) - response = client.fetch_linkable_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchLinkableRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.FetchLinkableRepositoriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_fetch_linkable_repositories_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - client.fetch_linkable_repositories() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchLinkableRepositoriesRequest() - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchLinkableRepositoriesRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchLinkableRepositoriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.fetch_linkable_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchLinkableRepositoriesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchLinkableRepositoriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_async_from_dict(): - await test_fetch_linkable_repositories_async(request_type=dict) - - -def test_fetch_linkable_repositories_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchLinkableRepositoriesRequest() - - request.connection = 'connection_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - call.return_value = repositories.FetchLinkableRepositoriesResponse() - client.fetch_linkable_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection=connection_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchLinkableRepositoriesRequest() - - request.connection = 'connection_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchLinkableRepositoriesResponse()) - await client.fetch_linkable_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection=connection_value', - ) in kw['metadata'] - - -def test_fetch_linkable_repositories_pager(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('connection', ''), - )), - ) - pager = client.fetch_linkable_repositories(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Repository) - for i in results) -def test_fetch_linkable_repositories_pages(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - pages = list(client.fetch_linkable_repositories(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_async_pager(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - async_pager = await client.fetch_linkable_repositories(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, repositories.Repository) - for i in responses) - - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_async_pages(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.fetch_linkable_repositories(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - repositories.FetchGitRefsRequest, - dict, -]) -def test_fetch_git_refs(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchGitRefsResponse( - ref_names=['ref_names_value'], - ) - response = client.fetch_git_refs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchGitRefsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchGitRefsResponse) - assert response.ref_names == ['ref_names_value'] - - -def test_fetch_git_refs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - client.fetch_git_refs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchGitRefsRequest() - -@pytest.mark.asyncio -async def test_fetch_git_refs_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchGitRefsRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse( - ref_names=['ref_names_value'], - )) - response = await client.fetch_git_refs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchGitRefsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.FetchGitRefsResponse) - assert response.ref_names == ['ref_names_value'] - - -@pytest.mark.asyncio -async def test_fetch_git_refs_async_from_dict(): - await test_fetch_git_refs_async(request_type=dict) - - -def test_fetch_git_refs_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchGitRefsRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - call.return_value = repositories.FetchGitRefsResponse() - client.fetch_git_refs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_git_refs_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchGitRefsRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse()) - await client.fetch_git_refs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -def test_fetch_git_refs_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchGitRefsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.fetch_git_refs( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - - -def test_fetch_git_refs_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_git_refs( - repositories.FetchGitRefsRequest(), - repository='repository_value', - ) - -@pytest.mark.asyncio -async def test_fetch_git_refs_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = repositories.FetchGitRefsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.fetch_git_refs( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_fetch_git_refs_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.fetch_git_refs( - repositories.FetchGitRefsRequest(), - repository='repository_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.CreateConnectionRequest, - dict, -]) -def test_create_connection_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 
'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_connection_rest_required_fields(request_type=repositories.CreateConnectionRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["connection_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "connectionId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "connectionId" in jsonified_request - assert jsonified_request["connectionId"] == request_init["connection_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["connectionId"] = 'connection_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("connection_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "connectionId" in jsonified_request - assert jsonified_request["connectionId"] == 'connection_id_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_connection(request) - - expected_params = [ - ( - "connectionId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_connection_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("connectionId", )) & set(("parent", "connection", "connectionId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_connection_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_create_connection") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_create_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.CreateConnectionRequest.pb(repositories.CreateConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.CreateConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.CreateConnectionRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 
'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_connection(request) - - -def test_create_connection_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) - - -def test_create_connection_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_connection( - repositories.CreateConnectionRequest(), - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - -def test_create_connection_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.GetConnectionRequest, - dict, -]) -def test_get_connection_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.Connection( - name='name_value', - disabled=True, - reconciling=True, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_connection(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Connection) - assert response.name == 'name_value' - assert response.disabled is True - assert response.reconciling is True - assert response.etag == 'etag_value' - - -def test_get_connection_rest_required_fields(request_type=repositories.GetConnectionRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.Connection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_connection_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_connection_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_get_connection") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_get_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
repositories.GetConnectionRequest.pb(repositories.GetConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.Connection.to_json(repositories.Connection()) - - request = repositories.GetConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.Connection() - - client.get_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.GetConnectionRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_connection(request) - - -def test_get_connection_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = repositories.Connection() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_get_connection_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_connection( - repositories.GetConnectionRequest(), - name='name_value', - ) - - -def test_get_connection_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.ListConnectionsRequest, - dict, -]) -def test_list_connections_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.ListConnectionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_connections(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_connections_rest_required_fields(request_type=repositories.ListConnectionsRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.ListConnectionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_connections(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_connections_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_connections._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_connections_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_list_connections") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_list_connections") as pre: - pre.assert_not_called() - post.assert_not_called() - 
pb_message = repositories.ListConnectionsRequest.pb(repositories.ListConnectionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.ListConnectionsResponse.to_json(repositories.ListConnectionsResponse()) - - request = repositories.ListConnectionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.ListConnectionsResponse() - - client.list_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_connections_rest_bad_request(transport: str = 'rest', request_type=repositories.ListConnectionsRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_connections(request) - - -def test_list_connections_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = repositories.ListConnectionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_connections(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) - - -def test_list_connections_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_connections( - repositories.ListConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_connections_rest_pager(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(repositories.ListConnectionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_connections(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Connection) - for i in results) - - pages = list(client.list_connections(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - repositories.UpdateConnectionRequest, - dict, -]) -def test_update_connection_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} - request_init["connection"] = {'name': 
'projects/sample1/locations/sample2/connections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_connection_rest_required_fields(request_type=repositories.UpdateConnectionRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "etag", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_connection_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "etag", "updateMask", )) & set(("connection", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_connection_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_update_connection") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_update_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.UpdateConnectionRequest.pb(repositories.UpdateConnectionRequest()) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.UpdateConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.UpdateConnectionRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} - request_init["connection"] = {'name': 'projects/sample1/locations/sample2/connections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 
'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_connection(request) - - -def test_update_connection_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{connection.name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_update_connection_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_connection( - repositories.UpdateConnectionRequest(), - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_connection_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.DeleteConnectionRequest, - dict, -]) -def test_delete_connection_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_delete_connection_rest_required_fields(request_type=repositories.DeleteConnectionRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_connection_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_connection_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_delete_connection") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_delete_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - 
pb_message = repositories.DeleteConnectionRequest.pb(repositories.DeleteConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.DeleteConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.DeleteConnectionRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_connection(request) - - -def test_delete_connection_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_delete_connection_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_connection( - repositories.DeleteConnectionRequest(), - name='name_value', - ) - - -def test_delete_connection_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.CreateRepositoryRequest, - dict, -]) -def test_create_repository_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request_init["repository"] = {'name': 'name_value', 'remote_uri': 'remote_uri_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'annotations': {}, 'etag': 'etag_value', 'webhook_id': 'webhook_id_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_repository(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_repository_rest_required_fields(request_type=repositories.CreateRepositoryRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["repository_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "repositoryId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "repositoryId" in jsonified_request - assert jsonified_request["repositoryId"] == request_init["repository_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["repositoryId"] = 'repository_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_repository._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("repository_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "repositoryId" in jsonified_request - assert jsonified_request["repositoryId"] == 'repository_id_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_repository(request) - - expected_params = [ - ( - "repositoryId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_repository_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_repository._get_unset_required_fields({}) - assert set(unset_fields) == (set(("repositoryId", )) & set(("parent", "repository", "repositoryId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_repository_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_create_repository") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_create_repository") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.CreateRepositoryRequest.pb(repositories.CreateRepositoryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.CreateRepositoryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.CreateRepositoryRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request_init["repository"] = {'name': 'name_value', 'remote_uri': 'remote_uri_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'annotations': {}, 'etag': 'etag_value', 'webhook_id': 'webhook_id_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_repository(request) - - -def test_create_repository_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_repository(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories" % client.transport._host, args[1]) - - -def test_create_repository_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_repository( - repositories.CreateRepositoryRequest(), - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - - -def test_create_repository_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.BatchCreateRepositoriesRequest, - dict, -]) -def test_batch_create_repositories_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.batch_create_repositories(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_batch_create_repositories_rest_required_fields(request_type=repositories.BatchCreateRepositoriesRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.batch_create_repositories(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_batch_create_repositories_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.batch_create_repositories._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "requests", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_create_repositories_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_batch_create_repositories") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_batch_create_repositories") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.BatchCreateRepositoriesRequest.pb(repositories.BatchCreateRepositoriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.BatchCreateRepositoriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.batch_create_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_batch_create_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.BatchCreateRepositoriesRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_create_repositories(request) - - -def test_batch_create_repositories_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.batch_create_repositories(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories:batchCreate" % client.transport._host, args[1]) - - -def test_batch_create_repositories_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_create_repositories( - repositories.BatchCreateRepositoriesRequest(), - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - - -def test_batch_create_repositories_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.GetRepositoryRequest, - dict, -]) -def test_get_repository_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = repositories.Repository( - name='name_value', - remote_uri='remote_uri_value', - etag='etag_value', - webhook_id='webhook_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_repository(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.Repository) - assert response.name == 'name_value' - assert response.remote_uri == 'remote_uri_value' - assert response.etag == 'etag_value' - assert response.webhook_id == 'webhook_id_value' - - -def test_get_repository_rest_required_fields(request_type=repositories.GetRepositoryRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = 
RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.Repository() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_repository(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_repository_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_repository._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_repository_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_get_repository") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_get_repository") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.GetRepositoryRequest.pb(repositories.GetRepositoryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.Repository.to_json(repositories.Repository()) - - request = repositories.GetRepositoryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.Repository() - - client.get_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.GetRepositoryRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_repository(request) - - -def test_get_repository_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.Repository() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_repository(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*/repositories/*}" % client.transport._host, args[1]) - - -def test_get_repository_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_repository( - repositories.GetRepositoryRequest(), - name='name_value', - ) - - -def test_get_repository_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.ListRepositoriesRequest, - dict, -]) -def test_list_repositories_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.ListRepositoriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_repositories(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRepositoriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_repositories_rest_required_fields(request_type=repositories.ListRepositoriesRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_repositories._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.ListRepositoriesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_repositories(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_repositories_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_repositories._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_repositories_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_list_repositories") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_list_repositories") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.ListRepositoriesRequest.pb(repositories.ListRepositoriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.ListRepositoriesResponse.to_json(repositories.ListRepositoriesResponse()) - - request = repositories.ListRepositoriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.ListRepositoriesResponse() - - client.list_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.ListRepositoriesRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_repositories(request) - - -def test_list_repositories_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.ListRepositoriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_repositories(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories" % client.transport._host, args[1]) - - -def test_list_repositories_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_repositories( - repositories.ListRepositoriesRequest(), - parent='parent_value', - ) - - -def test_list_repositories_rest_pager(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(repositories.ListRepositoriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - - pager = client.list_repositories(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Repository) - for i in results) - - pages = list(client.list_repositories(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - repositories.DeleteRepositoryRequest, - dict, -]) -def test_delete_repository_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - 
- # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_repository(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_delete_repository_rest_required_fields(request_type=repositories.DeleteRepositoryRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_repository._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("etag", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_repository(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_repository_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_repository._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_repository_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_delete_repository") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_delete_repository") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.DeleteRepositoryRequest.pb(repositories.DeleteRepositoryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, 
- "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.DeleteRepositoryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.DeleteRepositoryRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_repository(request) - - -def test_delete_repository_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_repository(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*/repositories/*}" % client.transport._host, args[1]) - - -def test_delete_repository_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_repository( - repositories.DeleteRepositoryRequest(), - name='name_value', - ) - - -def test_delete_repository_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchReadWriteTokenRequest, - dict, -]) -def test_fetch_read_write_token_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadWriteTokenResponse( - token='token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.fetch_read_write_token(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.FetchReadWriteTokenResponse) - assert response.token == 'token_value' - - -def test_fetch_read_write_token_rest_required_fields(request_type=repositories.FetchReadWriteTokenRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["repository"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_write_token._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["repository"] = 'repository_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_write_token._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "repository" in jsonified_request - assert jsonified_request["repository"] == 'repository_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadWriteTokenResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.fetch_read_write_token(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_read_write_token_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_read_write_token._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("repository", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_read_write_token_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_read_write_token") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_read_write_token") as pre: 
- pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.FetchReadWriteTokenRequest.pb(repositories.FetchReadWriteTokenRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.FetchReadWriteTokenResponse.to_json(repositories.FetchReadWriteTokenResponse()) - - request = repositories.FetchReadWriteTokenRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.FetchReadWriteTokenResponse() - - client.fetch_read_write_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_fetch_read_write_token_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchReadWriteTokenRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_read_write_token(request) - - -def test_fetch_read_write_token_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadWriteTokenResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - repository='repository_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.fetch_read_write_token(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadWriteToken" % client.transport._host, args[1]) - - -def test_fetch_read_write_token_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.fetch_read_write_token( - repositories.FetchReadWriteTokenRequest(), - repository='repository_value', - ) - - -def test_fetch_read_write_token_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchReadTokenRequest, - dict, -]) -def test_fetch_read_token_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadTokenResponse( - token='token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.fetch_read_token(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.FetchReadTokenResponse) - assert response.token == 'token_value' - - -def test_fetch_read_token_rest_required_fields(request_type=repositories.FetchReadTokenRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["repository"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_token._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["repository"] = 'repository_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_token._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "repository" in jsonified_request - assert jsonified_request["repository"] == 'repository_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadTokenResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.fetch_read_token(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_read_token_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_read_token._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("repository", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_read_token_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_read_token") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_read_token") as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = repositories.FetchReadTokenRequest.pb(repositories.FetchReadTokenRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.FetchReadTokenResponse.to_json(repositories.FetchReadTokenResponse()) - - request = repositories.FetchReadTokenRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.FetchReadTokenResponse() - - client.fetch_read_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_fetch_read_token_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchReadTokenRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_read_token(request) - - -def test_fetch_read_token_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadTokenResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - repository='repository_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.fetch_read_token(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadToken" % client.transport._host, args[1]) - - -def test_fetch_read_token_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.fetch_read_token( - repositories.FetchReadTokenRequest(), - repository='repository_value', - ) - - -def test_fetch_read_token_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchLinkableRepositoriesRequest, - dict, -]) -def test_fetch_linkable_repositories_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchLinkableRepositoriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchLinkableRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.fetch_linkable_repositories(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchLinkableRepositoriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_fetch_linkable_repositories_rest_required_fields(request_type=repositories.FetchLinkableRepositoriesRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["connection"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_linkable_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["connection"] = 'connection_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_linkable_repositories._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "connection" in jsonified_request - assert jsonified_request["connection"] == 'connection_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.FetchLinkableRepositoriesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.FetchLinkableRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.fetch_linkable_repositories(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_linkable_repositories_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_linkable_repositories._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("connection", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_linkable_repositories_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - 
mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_linkable_repositories") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_linkable_repositories") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.FetchLinkableRepositoriesRequest.pb(repositories.FetchLinkableRepositoriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.FetchLinkableRepositoriesResponse.to_json(repositories.FetchLinkableRepositoriesResponse()) - - request = repositories.FetchLinkableRepositoriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.FetchLinkableRepositoriesResponse() - - client.fetch_linkable_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_fetch_linkable_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchLinkableRepositoriesRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_linkable_repositories(request) - - -def test_fetch_linkable_repositories_rest_pager(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(repositories.FetchLinkableRepositoriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} - - pager = 
client.fetch_linkable_repositories(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Repository) - for i in results) - - pages = list(client.fetch_linkable_repositories(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchGitRefsRequest, - dict, -]) -def test_fetch_git_refs_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchGitRefsResponse( - ref_names=['ref_names_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.fetch_git_refs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.FetchGitRefsResponse) - assert response.ref_names == ['ref_names_value'] - - -def test_fetch_git_refs_rest_required_fields(request_type=repositories.FetchGitRefsRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["repository"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_git_refs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["repository"] = 'repository_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_git_refs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("ref_type", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "repository" in jsonified_request - assert jsonified_request["repository"] == 'repository_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.FetchGitRefsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.fetch_git_refs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_git_refs_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_git_refs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("refType", )) & set(("repository", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_git_refs_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_git_refs") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_git_refs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
repositories.FetchGitRefsRequest.pb(repositories.FetchGitRefsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.FetchGitRefsResponse.to_json(repositories.FetchGitRefsResponse()) - - request = repositories.FetchGitRefsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.FetchGitRefsResponse() - - client.fetch_git_refs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_fetch_git_refs_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchGitRefsRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_git_refs(request) - - -def test_fetch_git_refs_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = repositories.FetchGitRefsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - repository='repository_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.fetch_git_refs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:fetchGitRefs" % client.transport._host, args[1]) - - -def test_fetch_git_refs_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_git_refs( - repositories.FetchGitRefsRequest(), - repository='repository_value', - ) - - -def test_fetch_git_refs_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RepositoryManagerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RepositoryManagerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RepositoryManagerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RepositoryManagerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = RepositoryManagerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.RepositoryManagerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.RepositoryManagerGrpcTransport, - transports.RepositoryManagerGrpcAsyncIOTransport, - transports.RepositoryManagerRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = RepositoryManagerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.RepositoryManagerGrpcTransport, - ) - -def test_repository_manager_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.RepositoryManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_repository_manager_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.RepositoryManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_connection', - 'get_connection', - 'list_connections', - 'update_connection', - 'delete_connection', - 'create_repository', - 'batch_create_repositories', - 'get_repository', - 'list_repositories', - 'delete_repository', - 'fetch_read_write_token', - 'fetch_read_token', - 'fetch_linkable_repositories', - 'fetch_git_refs', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_repository_manager_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RepositoryManagerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - 
load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_repository_manager_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RepositoryManagerTransport() - adc.assert_called_once() - - -def test_repository_manager_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RepositoryManagerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RepositoryManagerGrpcTransport, - transports.RepositoryManagerGrpcAsyncIOTransport, - ], -) -def test_repository_manager_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RepositoryManagerGrpcTransport, - transports.RepositoryManagerGrpcAsyncIOTransport, - transports.RepositoryManagerRestTransport, - ], -) -def test_repository_manager_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.RepositoryManagerGrpcTransport, grpc_helpers), - (transports.RepositoryManagerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_repository_manager_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) -def test_repository_manager_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_repository_manager_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.RepositoryManagerRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_repository_manager_rest_lro_client(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_repository_manager_host_no_port(transport_name): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudbuild.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_repository_manager_host_with_port(transport_name): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudbuild.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudbuild.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_repository_manager_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = RepositoryManagerClient( - credentials=creds1, - transport=transport_name, - ) - client2 = RepositoryManagerClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_connection._session - session2 = client2.transport.create_connection._session - assert session1 != session2 - session1 = client1.transport.get_connection._session - session2 = client2.transport.get_connection._session - assert session1 != session2 - session1 = client1.transport.list_connections._session - session2 = client2.transport.list_connections._session - assert session1 != session2 - session1 = 
client1.transport.update_connection._session - session2 = client2.transport.update_connection._session - assert session1 != session2 - session1 = client1.transport.delete_connection._session - session2 = client2.transport.delete_connection._session - assert session1 != session2 - session1 = client1.transport.create_repository._session - session2 = client2.transport.create_repository._session - assert session1 != session2 - session1 = client1.transport.batch_create_repositories._session - session2 = client2.transport.batch_create_repositories._session - assert session1 != session2 - session1 = client1.transport.get_repository._session - session2 = client2.transport.get_repository._session - assert session1 != session2 - session1 = client1.transport.list_repositories._session - session2 = client2.transport.list_repositories._session - assert session1 != session2 - session1 = client1.transport.delete_repository._session - session2 = client2.transport.delete_repository._session - assert session1 != session2 - session1 = client1.transport.fetch_read_write_token._session - session2 = client2.transport.fetch_read_write_token._session - assert session1 != session2 - session1 = client1.transport.fetch_read_token._session - session2 = client2.transport.fetch_read_token._session - assert session1 != session2 - session1 = client1.transport.fetch_linkable_repositories._session - session2 = client2.transport.fetch_linkable_repositories._session - assert session1 != session2 - session1 = client1.transport.fetch_git_refs._session - session2 = client2.transport.fetch_git_refs._session - assert session1 != session2 -def test_repository_manager_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.RepositoryManagerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_repository_manager_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.RepositoryManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) -def test_repository_manager_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - 
"mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) -def test_repository_manager_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_repository_manager_grpc_lro_client(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_repository_manager_grpc_lro_async_client(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_connection_path(): - project = "squid" - location = "clam" - connection = "whelk" - expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - actual = RepositoryManagerClient.connection_path(project, location, connection) - assert expected == actual - - -def test_parse_connection_path(): - expected = { - "project": "octopus", - "location": "oyster", - "connection": "nudibranch", - } - path = RepositoryManagerClient.connection_path(**expected) - - # Check that the path construction is reversible. 
- actual = RepositoryManagerClient.parse_connection_path(path) - assert expected == actual - -def test_repository_path(): - project = "cuttlefish" - location = "mussel" - connection = "winkle" - repository = "nautilus" - expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) - actual = RepositoryManagerClient.repository_path(project, location, connection, repository) - assert expected == actual - - -def test_parse_repository_path(): - expected = { - "project": "scallop", - "location": "abalone", - "connection": "squid", - "repository": "clam", - } - path = RepositoryManagerClient.repository_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_repository_path(path) - assert expected == actual - -def test_secret_version_path(): - project = "whelk" - secret = "octopus" - version = "oyster" - expected = "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) - actual = RepositoryManagerClient.secret_version_path(project, secret, version) - assert expected == actual - - -def test_parse_secret_version_path(): - expected = { - "project": "nudibranch", - "secret": "cuttlefish", - "version": "mussel", - } - path = RepositoryManagerClient.secret_version_path(**expected) - - # Check that the path construction is reversible. 
- actual = RepositoryManagerClient.parse_secret_version_path(path) - assert expected == actual - -def test_service_path(): - project = "winkle" - location = "nautilus" - namespace = "scallop" - service = "abalone" - expected = "projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format(project=project, location=location, namespace=namespace, service=service, ) - actual = RepositoryManagerClient.service_path(project, location, namespace, service) - assert expected == actual - - -def test_parse_service_path(): - expected = { - "project": "squid", - "location": "clam", - "namespace": "whelk", - "service": "octopus", - } - path = RepositoryManagerClient.service_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_service_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = RepositoryManagerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = RepositoryManagerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = RepositoryManagerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = RepositoryManagerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = RepositoryManagerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = RepositoryManagerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = RepositoryManagerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = RepositoryManagerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = RepositoryManagerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = RepositoryManagerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = RepositoryManagerClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = RepositoryManagerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.RepositoryManagerTransport, '_prep_wrapped_messages') as prep: - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.RepositoryManagerTransport, '_prep_wrapped_messages') as prep: - transport_class = RepositoryManagerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_cancel_operation(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index bf3f73ec..c9173566 100644 --- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -777,16 +777,16 @@ async def test_create_build_async_from_dict(): await test_create_build_async(request_type=dict) -def test_create_build_field_headers(): +def test_create_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloudbuild.CreateBuildRequest() - - request.project_id = "project_id_value" + request = cloudbuild.CreateBuildRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_build), "__call__") as call: @@ -798,44 +798,9 @@ def test_create_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildRequest() - - request.project_id = "project_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] def test_create_build_flattened(): @@ -1059,17 +1024,16 @@ async def test_get_build_async_from_dict(): await test_get_build_async(request_type=dict) -def test_get_build_field_headers(): +def test_get_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" + request = cloudbuild.GetBuildRequest( + **{"name": "projects/sample1/locations/sample2/builds/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_build), "__call__") as call: @@ -1081,43 +1045,9 @@ def test_get_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) - await client.get_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_get_build_flattened(): @@ -1299,16 +1229,16 @@ async def test_list_builds_async_from_dict(): await test_list_builds_async(request_type=dict) -def test_list_builds_field_headers(): +def test_list_builds_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildsRequest() - - request.project_id = "project_id_value" + request = cloudbuild.ListBuildsRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_builds), "__call__") as call: @@ -1320,44 +1250,9 @@ def test_list_builds_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_builds_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildsRequest() - - request.project_id = "project_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_builds), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListBuildsResponse() - ) - await client.list_builds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_list_builds_flattened(): @@ -1490,9 +1385,6 @@ def test_list_builds_pager(transport_name: str = "grpc"): ) metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), - ) pager = client.list_builds(request={}) assert pager._metadata == metadata @@ -1771,17 +1663,16 @@ async def test_cancel_build_async_from_dict(): await test_cancel_build_async(request_type=dict) -def test_cancel_build_field_headers(): +def test_cancel_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.CancelBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" + request = cloudbuild.CancelBuildRequest( + **{"name": "projects/sample1/locations/sample2/builds/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: @@ -1793,43 +1684,9 @@ def test_cancel_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = cloudbuild.CancelBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) - await client.cancel_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_cancel_build_flattened(): @@ -2005,17 +1862,16 @@ async def test_retry_build_async_from_dict(): await test_retry_build_async(request_type=dict) -def test_retry_build_field_headers(): +def test_retry_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.RetryBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" + request = cloudbuild.RetryBuildRequest( + **{"name": "projects/sample1/locations/sample2/builds/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.retry_build), "__call__") as call: @@ -2027,45 +1883,9 @@ def test_retry_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_retry_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.RetryBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.retry_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_retry_build_flattened(): @@ -2243,16 +2063,16 @@ async def test_approve_build_async_from_dict(): await test_approve_build_async(request_type=dict) -def test_approve_build_field_headers(): +def test_approve_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.ApproveBuildRequest() - - request.name = "name_value" + request = cloudbuild.ApproveBuildRequest( + **{"name": "projects/sample1/locations/sample2/builds/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.approve_build), "__call__") as call: @@ -2264,44 +2084,9 @@ def test_approve_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_approve_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ApproveBuildRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.approve_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.approve_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_approve_build_flattened(): @@ -2536,16 +2321,16 @@ async def test_create_build_trigger_async_from_dict(): await test_create_build_trigger_async(request_type=dict) -def test_create_build_trigger_field_headers(): +def test_create_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloudbuild.CreateBuildTriggerRequest() - - request.project_id = "project_id_value" + request = cloudbuild.CreateBuildTriggerRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2559,46 +2344,9 @@ def test_create_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildTriggerRequest() - - request.project_id = "project_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) - await client.create_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] def test_create_build_trigger_flattened(): @@ -2829,17 +2577,16 @@ async def test_get_build_trigger_async_from_dict(): await test_get_build_trigger_async(request_type=dict) -def test_get_build_trigger_field_headers(): +def test_get_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" + request = cloudbuild.GetBuildTriggerRequest( + **{"name": "projects/sample1/locations/sample2/triggers/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2853,47 +2600,9 @@ def test_get_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) - await client.get_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_get_build_trigger_flattened(): @@ -3087,16 +2796,16 @@ async def test_list_build_triggers_async_from_dict(): await test_list_build_triggers_async(request_type=dict) -def test_list_build_triggers_field_headers(): +def test_list_build_triggers_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildTriggersRequest() - - request.project_id = "project_id_value" + request = cloudbuild.ListBuildTriggersRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3110,46 +2819,9 @@ def test_list_build_triggers_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_build_triggers_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildTriggersRequest() - - request.project_id = "project_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_build_triggers), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListBuildTriggersResponse() - ) - await client.list_build_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_list_build_triggers_flattened(): @@ -3278,9 +2950,6 @@ def test_list_build_triggers_pager(transport_name: str = "grpc"): ) metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), - ) pager = client.list_build_triggers(request={}) assert pager._metadata == metadata @@ -3523,17 +3192,16 @@ async def test_delete_build_trigger_async_from_dict(): await test_delete_build_trigger_async(request_type=dict) -def test_delete_build_trigger_field_headers(): +def test_delete_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" + request = cloudbuild.DeleteBuildTriggerRequest( + **{"name": "projects/sample1/locations/sample2/triggers/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3547,45 +3215,9 @@ def test_delete_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_delete_build_trigger_flattened(): @@ -3814,17 +3446,20 @@ async def test_update_build_trigger_async_from_dict(): await test_update_build_trigger_async(request_type=dict) -def test_update_build_trigger_field_headers(): +def test_update_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloudbuild.UpdateBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" + request = cloudbuild.UpdateBuildTriggerRequest( + **{ + "trigger": { + "resource_name": "projects/sample1/locations/sample2/triggers/sample3" + } + } + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3838,47 +3473,9 @@ def test_update_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) - await client.update_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] def test_update_build_trigger_flattened(): @@ -4076,17 +3673,16 @@ async def test_run_build_trigger_async_from_dict(): await test_run_build_trigger_async(request_type=dict) -def test_run_build_trigger_field_headers(): +def test_run_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.RunBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" + request = cloudbuild.RunBuildTriggerRequest( + **{"name": "projects/sample1/locations/sample2/triggers/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4100,47 +3696,9 @@ def test_run_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_run_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.RunBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.run_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_run_build_trigger_flattened(): @@ -4495,16 +4053,16 @@ async def test_create_worker_pool_async_from_dict(): await test_create_worker_pool_async(request_type=dict) -def test_create_worker_pool_field_headers(): +def test_create_worker_pool_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.CreateWorkerPoolRequest() - - request.parent = "parent_value" + request = cloudbuild.CreateWorkerPoolRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4518,46 +4076,9 @@ def test_create_worker_pool_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_worker_pool_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateWorkerPoolRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_worker_pool), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_create_worker_pool_flattened(): @@ -4771,16 +4292,16 @@ async def test_get_worker_pool_async_from_dict(): await test_get_worker_pool_async(request_type=dict) -def test_get_worker_pool_field_headers(): +def test_get_worker_pool_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.GetWorkerPoolRequest() - - request.name = "name_value" + request = cloudbuild.GetWorkerPoolRequest( + **{"name": "projects/sample1/locations/sample2/workerPools/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_worker_pool), "__call__") as call: @@ -4792,44 +4313,9 @@ def test_get_worker_pool_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_worker_pool_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = cloudbuild.GetWorkerPoolRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_worker_pool), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.WorkerPool() - ) - await client.get_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_get_worker_pool_flattened(): @@ -5003,16 +4489,16 @@ async def test_delete_worker_pool_async_from_dict(): await test_delete_worker_pool_async(request_type=dict) -def test_delete_worker_pool_field_headers(): +def test_delete_worker_pool_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteWorkerPoolRequest() - - request.name = "name_value" + request = cloudbuild.DeleteWorkerPoolRequest( + **{"name": "projects/sample1/locations/sample2/workerPools/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5026,46 +4512,9 @@ def test_delete_worker_pool_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_worker_pool_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteWorkerPoolRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_worker_pool), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_delete_worker_pool_flattened(): @@ -5243,16 +4692,20 @@ async def test_update_worker_pool_async_from_dict(): await test_update_worker_pool_async(request_type=dict) -def test_update_worker_pool_field_headers(): +def test_update_worker_pool_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateWorkerPoolRequest() - - request.worker_pool.name = "name_value" + request = cloudbuild.UpdateWorkerPoolRequest( + **{ + "worker_pool": { + "name": "projects/sample1/locations/sample2/workerPools/sample3" + } + } + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -5266,46 +4719,9 @@ def test_update_worker_pool_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "worker_pool.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_worker_pool_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateWorkerPoolRequest() - - request.worker_pool.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "worker_pool.name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_update_worker_pool_flattened(): @@ -5499,16 +4915,16 @@ async def test_list_worker_pools_async_from_dict(): await test_list_worker_pools_async(request_type=dict) -def test_list_worker_pools_field_headers(): +def test_list_worker_pools_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloudbuild.ListWorkerPoolsRequest() - - request.parent = "parent_value" + request = cloudbuild.ListWorkerPoolsRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5522,46 +4938,9 @@ def test_list_worker_pools_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_worker_pools_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListWorkerPoolsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListWorkerPoolsResponse() - ) - await client.list_worker_pools(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] def test_list_worker_pools_flattened(): @@ -5690,9 +5069,6 @@ def test_list_worker_pools_pager(transport_name: str = "grpc"): ) metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) pager = client.list_worker_pools(request={}) assert pager._metadata == metadata @@ -13004,19 +12380,22 @@ def test_parse_build_path(): def test_build_trigger_path(): project = "oyster" - trigger = "nudibranch" - expected = "projects/{project}/triggers/{trigger}".format( + location = "nudibranch" + trigger = "cuttlefish" + expected = "projects/{project}/locations/{location}/triggers/{trigger}".format( project=project, + location=location, trigger=trigger, ) - actual = CloudBuildClient.build_trigger_path(project, trigger) + actual = CloudBuildClient.build_trigger_path(project, location, trigger) assert expected == actual def test_parse_build_trigger_path(): expected = { - "project": "cuttlefish", - "trigger": "mussel", + "project": "mussel", + "location": "winkle", + "trigger": "nautilus", } path = CloudBuildClient.build_trigger_path(**expected) @@ -13026,10 +12405,10 @@ def test_parse_build_trigger_path(): def test_crypto_key_path(): - project = "winkle" - location = "nautilus" - keyring = "scallop" - key = "abalone" + project = "scallop" + location = "abalone" + keyring = "squid" + key = "clam" expected = "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format( project=project, location=location, @@ -13042,10 +12421,10 @@ def test_crypto_key_path(): def test_parse_crypto_key_path(): expected = { - "project": "squid", - "location": "clam", - "keyring": "whelk", - "key": "octopus", + "project": "whelk", + "location": "octopus", + "keyring": "oyster", + "key": "nudibranch", } path = CloudBuildClient.crypto_key_path(**expected) @@ -13055,8 +12434,8 @@ def test_parse_crypto_key_path(): def test_network_path(): - project = "oyster" - network = "nudibranch" + project = "cuttlefish" 
+ network = "mussel" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -13067,8 +12446,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "cuttlefish", - "network": "mussel", + "project": "winkle", + "network": "nautilus", } path = CloudBuildClient.network_path(**expected) @@ -13078,10 +12457,10 @@ def test_parse_network_path(): def test_repository_path(): - project = "winkle" - location = "nautilus" - connection = "scallop" - repository = "abalone" + project = "scallop" + location = "abalone" + connection = "squid" + repository = "clam" expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format( project=project, location=location, @@ -13094,10 +12473,10 @@ def test_repository_path(): def test_parse_repository_path(): expected = { - "project": "squid", - "location": "clam", - "connection": "whelk", - "repository": "octopus", + "project": "whelk", + "location": "octopus", + "connection": "oyster", + "repository": "nudibranch", } path = CloudBuildClient.repository_path(**expected) @@ -13107,9 +12486,9 @@ def test_parse_repository_path(): def test_secret_version_path(): - project = "oyster" - secret = "nudibranch" - version = "cuttlefish" + project = "cuttlefish" + secret = "mussel" + version = "winkle" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -13121,9 +12500,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "mussel", - "secret": "winkle", - "version": "nautilus", + "project": "nautilus", + "secret": "scallop", + "version": "abalone", } path = CloudBuildClient.secret_version_path(**expected) @@ -13133,8 +12512,8 @@ def test_parse_secret_version_path(): def test_service_account_path(): - project = "scallop" - service_account = "abalone" + project = "squid" + service_account = "clam" expected = 
"projects/{project}/serviceAccounts/{service_account}".format( project=project, service_account=service_account, @@ -13145,8 +12524,8 @@ def test_service_account_path(): def test_parse_service_account_path(): expected = { - "project": "squid", - "service_account": "clam", + "project": "whelk", + "service_account": "octopus", } path = CloudBuildClient.service_account_path(**expected) @@ -13156,8 +12535,8 @@ def test_parse_service_account_path(): def test_subscription_path(): - project = "whelk" - subscription = "octopus" + project = "oyster" + subscription = "nudibranch" expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, @@ -13168,8 +12547,8 @@ def test_subscription_path(): def test_parse_subscription_path(): expected = { - "project": "oyster", - "subscription": "nudibranch", + "project": "cuttlefish", + "subscription": "mussel", } path = CloudBuildClient.subscription_path(**expected) @@ -13179,8 +12558,8 @@ def test_parse_subscription_path(): def test_topic_path(): - project = "cuttlefish" - topic = "mussel" + project = "winkle" + topic = "nautilus" expected = "projects/{project}/topics/{topic}".format( project=project, topic=topic, @@ -13191,8 +12570,8 @@ def test_topic_path(): def test_parse_topic_path(): expected = { - "project": "winkle", - "topic": "nautilus", + "project": "scallop", + "topic": "abalone", } path = CloudBuildClient.topic_path(**expected) @@ -13202,9 +12581,9 @@ def test_parse_topic_path(): def test_worker_pool_path(): - project = "scallop" - location = "abalone" - worker_pool = "squid" + project = "squid" + location = "clam" + worker_pool = "whelk" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -13218,9 +12597,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "clam", - "location": "whelk", - "worker_pool": "octopus", + "project": "octopus", + "location": "oyster", + 
"worker_pool": "nudibranch", } path = CloudBuildClient.worker_pool_path(**expected) @@ -13230,7 +12609,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -13240,7 +12619,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "mussel", } path = CloudBuildClient.common_billing_account_path(**expected) @@ -13250,7 +12629,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -13260,7 +12639,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "nautilus", } path = CloudBuildClient.common_folder_path(**expected) @@ -13270,7 +12649,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -13280,7 +12659,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "abalone", } path = CloudBuildClient.common_organization_path(**expected) @@ -13290,7 +12669,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -13300,7 +12679,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "clam", } path = CloudBuildClient.common_project_path(**expected) @@ -13310,8 +12689,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - 
location = "clam" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -13322,8 +12701,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "oyster", + "location": "nudibranch", } path = CloudBuildClient.common_location_path(**expected) From 168b3fefd09973f04da21ccae61736f1d0ecacde Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 12 Jul 2023 17:49:55 +0000 Subject: [PATCH 3/4] fix: Revert breaking change of main pattern PiperOrigin-RevId: 547506413 Source-Link: https://github.com/googleapis/googleapis/commit/8080bbe89bd9c5bcc23c2e548ebf33354095f8ee Source-Link: https://github.com/googleapis/googleapis-gen/commit/ac98388bb832dbebc0bcd6b7480010573c0bbdc4 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWM5ODM4OGJiODMyZGJlYmMwYmNkNmI3NDgwMDEwNTczYzBiYmRjNCJ9 --- owl-bot-staging/v1/.coveragerc | 13 + owl-bot-staging/v1/.flake8 | 33 + owl-bot-staging/v1/MANIFEST.in | 2 + owl-bot-staging/v1/README.rst | 49 + .../v1/docs/cloudbuild_v1/cloud_build.rst | 10 + .../v1/docs/cloudbuild_v1/services.rst | 6 + .../v1/docs/cloudbuild_v1/types.rst | 6 + owl-bot-staging/v1/docs/conf.py | 376 + owl-bot-staging/v1/docs/index.rst | 7 + .../cloud/devtools/cloudbuild/__init__.py | 151 + .../devtools/cloudbuild/gapic_version.py | 16 + .../google/cloud/devtools/cloudbuild/py.typed | 2 + .../cloud/devtools/cloudbuild_v1/__init__.py | 152 + .../cloudbuild_v1/gapic_metadata.json | 298 + .../devtools/cloudbuild_v1/gapic_version.py | 16 + .../cloud/devtools/cloudbuild_v1/py.typed | 2 + .../cloudbuild_v1/services/__init__.py | 15 + .../services/cloud_build/__init__.py | 22 + .../services/cloud_build/async_client.py | 2601 ++++ .../services/cloud_build/client.py | 2899 +++++ .../services/cloud_build/pagers.py | 381 + .../cloud_build/transports/__init__.py | 38 + .../services/cloud_build/transports/base.py | 
443 + .../services/cloud_build/transports/grpc.py | 793 ++ .../cloud_build/transports/grpc_asyncio.py | 792 ++ .../services/cloud_build/transports/rest.py | 2419 ++++ .../devtools/cloudbuild_v1/types/__init__.py | 144 + .../cloudbuild_v1/types/cloudbuild.py | 3680 ++++++ owl-bot-staging/v1/mypy.ini | 3 + owl-bot-staging/v1/noxfile.py | 184 + ...nerated_cloud_build_approve_build_async.py | 56 + ...enerated_cloud_build_approve_build_sync.py | 56 + ...enerated_cloud_build_cancel_build_async.py | 53 + ...generated_cloud_build_cancel_build_sync.py | 53 + ...enerated_cloud_build_create_build_async.py | 56 + ...generated_cloud_build_create_build_sync.py | 56 + ..._cloud_build_create_build_trigger_async.py | 56 + ...d_cloud_build_create_build_trigger_sync.py | 56 + ...ed_cloud_build_create_worker_pool_async.py | 57 + ...ted_cloud_build_create_worker_pool_sync.py | 57 + ..._cloud_build_delete_build_trigger_async.py | 51 + ...d_cloud_build_delete_build_trigger_sync.py | 51 + ...ed_cloud_build_delete_worker_pool_async.py | 56 + ...ted_cloud_build_delete_worker_pool_sync.py | 56 + ...1_generated_cloud_build_get_build_async.py | 53 + ...v1_generated_cloud_build_get_build_sync.py | 53 + ...ted_cloud_build_get_build_trigger_async.py | 53 + ...ated_cloud_build_get_build_trigger_sync.py | 53 + ...rated_cloud_build_get_worker_pool_async.py | 52 + ...erated_cloud_build_get_worker_pool_sync.py | 52 + ...d_cloud_build_list_build_triggers_async.py | 53 + ...ed_cloud_build_list_build_triggers_sync.py | 53 + ...generated_cloud_build_list_builds_async.py | 53 + ..._generated_cloud_build_list_builds_sync.py | 53 + ...ted_cloud_build_list_worker_pools_async.py | 53 + ...ated_cloud_build_list_worker_pools_sync.py | 53 + ...oud_build_receive_trigger_webhook_async.py | 51 + ...loud_build_receive_trigger_webhook_sync.py | 51 + ...generated_cloud_build_retry_build_async.py | 57 + ..._generated_cloud_build_retry_build_sync.py | 57 + ...ted_cloud_build_run_build_trigger_async.py | 57 + 
...ated_cloud_build_run_build_trigger_sync.py | 57 + ..._cloud_build_update_build_trigger_async.py | 57 + ...d_cloud_build_update_build_trigger_sync.py | 57 + ...ed_cloud_build_update_worker_pool_async.py | 55 + ...ted_cloud_build_update_worker_pool_sync.py | 55 + ...etadata_google.devtools.cloudbuild.v1.json | 3027 +++++ .../scripts/fixup_cloudbuild_v1_keywords.py | 193 + owl-bot-staging/v1/setup.py | 90 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.7.txt | 9 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + owl-bot-staging/v1/tests/__init__.py | 16 + owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/cloudbuild_v1/__init__.py | 16 + .../gapic/cloudbuild_v1/test_cloud_build.py | 10280 ++++++++++++++++ owl-bot-staging/v2/.coveragerc | 13 + owl-bot-staging/v2/.flake8 | 33 + owl-bot-staging/v2/MANIFEST.in | 2 + owl-bot-staging/v2/README.rst | 49 + .../docs/cloudbuild_v2/repository_manager.rst | 10 + .../v2/docs/cloudbuild_v2/services.rst | 6 + .../v2/docs/cloudbuild_v2/types.rst | 6 + owl-bot-staging/v2/docs/conf.py | 376 + owl-bot-staging/v2/docs/index.rst | 7 + .../cloud/devtools/cloudbuild/__init__.py | 93 + .../devtools/cloudbuild/gapic_version.py | 16 + .../google/cloud/devtools/cloudbuild/py.typed | 2 + .../cloud/devtools/cloudbuild_v2/__init__.py | 94 + .../cloudbuild_v2/gapic_metadata.json | 238 + .../devtools/cloudbuild_v2/gapic_version.py | 16 + .../cloud/devtools/cloudbuild_v2/py.typed | 2 + .../cloudbuild_v2/services/__init__.py | 15 + .../services/repository_manager/__init__.py | 22 + .../repository_manager/async_client.py | 2257 ++++ .../services/repository_manager/client.py | 2445 ++++ .../services/repository_manager/pagers.py | 381 + .../repository_manager/transports/__init__.py | 38 + .../repository_manager/transports/base.py | 431 + 
.../repository_manager/transports/grpc.py | 743 ++ .../transports/grpc_asyncio.py | 742 ++ .../repository_manager/transports/rest.py | 2275 ++++ .../devtools/cloudbuild_v2/types/__init__.py | 88 + .../cloudbuild_v2/types/cloudbuild.py | 159 + .../cloudbuild_v2/types/repositories.py | 1104 ++ owl-bot-staging/v2/mypy.ini | 3 + owl-bot-staging/v2/noxfile.py | 184 + ...manager_batch_create_repositories_async.py | 62 + ..._manager_batch_create_repositories_sync.py | 62 + ...ository_manager_create_connection_async.py | 57 + ...pository_manager_create_connection_sync.py | 57 + ...ository_manager_create_repository_async.py | 61 + ...pository_manager_create_repository_sync.py | 61 + ...ository_manager_delete_connection_async.py | 56 + ...pository_manager_delete_connection_sync.py | 56 + ...ository_manager_delete_repository_async.py | 56 + ...pository_manager_delete_repository_sync.py | 56 + ...repository_manager_fetch_git_refs_async.py | 52 + ..._repository_manager_fetch_git_refs_sync.py | 52 + ...nager_fetch_linkable_repositories_async.py | 53 + ...anager_fetch_linkable_repositories_sync.py | 53 + ...pository_manager_fetch_read_token_async.py | 52 + ...epository_manager_fetch_read_token_sync.py | 52 + ...ry_manager_fetch_read_write_token_async.py | 52 + ...ory_manager_fetch_read_write_token_sync.py | 52 + ...repository_manager_get_connection_async.py | 52 + ..._repository_manager_get_connection_sync.py | 52 + ...repository_manager_get_repository_async.py | 52 + ..._repository_manager_get_repository_sync.py | 52 + ...pository_manager_list_connections_async.py | 53 + ...epository_manager_list_connections_sync.py | 53 + ...ository_manager_list_repositories_async.py | 53 + ...pository_manager_list_repositories_sync.py | 53 + ...ository_manager_update_connection_async.py | 55 + ...pository_manager_update_connection_sync.py | 55 + ...etadata_google.devtools.cloudbuild.v2.json | 2309 ++++ .../scripts/fixup_cloudbuild_v2_keywords.py | 189 + owl-bot-staging/v2/setup.py | 91 + 
.../v2/testing/constraints-3.10.txt | 7 + .../v2/testing/constraints-3.11.txt | 7 + .../v2/testing/constraints-3.12.txt | 7 + .../v2/testing/constraints-3.7.txt | 10 + .../v2/testing/constraints-3.8.txt | 7 + .../v2/testing/constraints-3.9.txt | 7 + owl-bot-staging/v2/tests/__init__.py | 16 + owl-bot-staging/v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/cloudbuild_v2/__init__.py | 16 + .../cloudbuild_v2/test_repository_manager.py | 9596 +++++++++++++++ 153 files changed, 56880 insertions(+) create mode 100644 owl-bot-staging/v1/.coveragerc create mode 100644 owl-bot-staging/v1/.flake8 create mode 100644 owl-bot-staging/v1/MANIFEST.in create mode 100644 owl-bot-staging/v1/README.rst create mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst create mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/services.rst create mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/types.rst create mode 100644 owl-bot-staging/v1/docs/conf.py create mode 100644 owl-bot-staging/v1/docs/index.rst create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py create mode 100644 
owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py create mode 100644 owl-bot-staging/v1/mypy.ini create mode 100644 owl-bot-staging/v1/noxfile.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py 
create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json create mode 100644 owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py create mode 100644 owl-bot-staging/v1/setup.py create mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt create mode 100644 
owl-bot-staging/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1/tests/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py create mode 100644 owl-bot-staging/v2/.coveragerc create mode 100644 owl-bot-staging/v2/.flake8 create mode 100644 owl-bot-staging/v2/MANIFEST.in create mode 100644 owl-bot-staging/v2/README.rst create mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst create mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/services.rst create mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/types.rst create mode 100644 owl-bot-staging/v2/docs/conf.py create mode 100644 owl-bot-staging/v2/docs/index.rst create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py 
create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py create mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py create mode 100644 owl-bot-staging/v2/mypy.ini create mode 100644 owl-bot-staging/v2/noxfile.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py create mode 100644 
owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py create mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json create mode 100644 owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py create mode 100644 owl-bot-staging/v2/setup.py create mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v2/tests/__init__.py 
create mode 100644 owl-bot-staging/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc new file mode 100644 index 00000000..a0cf72db --- /dev/null +++ b/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/devtools/cloudbuild/__init__.py + google/cloud/devtools/cloudbuild/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 00000000..af14cd40 --- /dev/null +++ b/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/devtools/cloudbuild *.py +recursive-include google/cloud/devtools/cloudbuild_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst new file mode 100644 index 00000000..c788a1b3 --- /dev/null +++ b/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Devtools Cloudbuild API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Devtools Cloudbuild API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst new file mode 100644 index 00000000..be81dc5c --- /dev/null +++ b/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst @@ -0,0 +1,10 @@ +CloudBuild +---------------------------- + +.. automodule:: google.cloud.devtools.cloudbuild_v1.services.cloud_build + :members: + :inherited-members: + +.. automodule:: google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst new file mode 100644 index 00000000..c0bdc88d --- /dev/null +++ b/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Devtools Cloudbuild v1 API +==================================================== +.. toctree:: + :maxdepth: 2 + + cloud_build diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst new file mode 100644 index 00000000..0e955742 --- /dev/null +++ b/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Devtools Cloudbuild v1 API +================================================= + +.. automodule:: google.cloud.devtools.cloudbuild_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 00000000..4bd8e2dd --- /dev/null +++ b/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-build documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. 
+templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-build" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). 
+# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Devtools Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. 
+# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-build-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-build.tex", + u"google-cloud-build Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-build", + u"Google Cloud Devtools Cloudbuild Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-build", + u"google-cloud-build Documentation", + author, + "google-cloud-build", + "GAPIC library for Google Cloud Devtools Cloudbuild API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 00000000..37eed237 --- /dev/null +++ b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + cloudbuild_v1/services + cloudbuild_v1/types diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py new file mode 100644 index 00000000..06ff95e8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.devtools.cloudbuild import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.devtools.cloudbuild_v1.services.cloud_build.client import CloudBuildClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build.async_client import CloudBuildAsyncClient + +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApprovalConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApprovalResult +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApproveBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ArtifactResult +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Artifacts +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Build +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildApproval +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildOperationMetadata +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildOptions +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildStep +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildTrigger +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuiltImage +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CancelBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildTriggerRequest +from 
google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolOperationMetadata +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolOperationMetadata +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import FileHashes +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetWorkerPoolRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitHubEventsConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitSource +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Hash +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import InlineSecret +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PrivatePoolV1Config +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PubsubConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PullRequestFilter +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PushFilter +from 
google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ReceiveTriggerWebhookRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ReceiveTriggerWebhookResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RepositoryEventConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RepoSource +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Results +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RetryBuildRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RunBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Secret +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SecretManagerSecret +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Secrets +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Source +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SourceProvenance +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSource +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSourceManifest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import TimeSpan +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolOperationMetadata +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedMavenArtifact +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedNpmPackage +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedPythonPackage +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Volume +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import WebhookConfig +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import 
WorkerPool + +__all__ = ('CloudBuildClient', + 'CloudBuildAsyncClient', + 'ApprovalConfig', + 'ApprovalResult', + 'ApproveBuildRequest', + 'ArtifactResult', + 'Artifacts', + 'Build', + 'BuildApproval', + 'BuildOperationMetadata', + 'BuildOptions', + 'BuildStep', + 'BuildTrigger', + 'BuiltImage', + 'CancelBuildRequest', + 'CreateBuildRequest', + 'CreateBuildTriggerRequest', + 'CreateWorkerPoolOperationMetadata', + 'CreateWorkerPoolRequest', + 'DeleteBuildTriggerRequest', + 'DeleteWorkerPoolOperationMetadata', + 'DeleteWorkerPoolRequest', + 'FileHashes', + 'GetBuildRequest', + 'GetBuildTriggerRequest', + 'GetWorkerPoolRequest', + 'GitHubEventsConfig', + 'GitSource', + 'Hash', + 'InlineSecret', + 'ListBuildsRequest', + 'ListBuildsResponse', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', + 'PrivatePoolV1Config', + 'PubsubConfig', + 'PullRequestFilter', + 'PushFilter', + 'ReceiveTriggerWebhookRequest', + 'ReceiveTriggerWebhookResponse', + 'RepositoryEventConfig', + 'RepoSource', + 'Results', + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'Secret', + 'SecretManagerSecret', + 'Secrets', + 'Source', + 'SourceProvenance', + 'StorageSource', + 'StorageSourceManifest', + 'TimeSpan', + 'UpdateBuildTriggerRequest', + 'UpdateWorkerPoolOperationMetadata', + 'UpdateWorkerPoolRequest', + 'UploadedMavenArtifact', + 'UploadedNpmPackage', + 'UploadedPythonPackage', + 'Volume', + 'WebhookConfig', + 'WorkerPool', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py new file mode 100644 index 00000000..360a0d13 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed new file mode 100644 index 00000000..6070c14c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py new file mode 100644 index 00000000..9fcffdb4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_build import CloudBuildClient +from .services.cloud_build import CloudBuildAsyncClient + +from .types.cloudbuild import ApprovalConfig +from .types.cloudbuild import ApprovalResult +from .types.cloudbuild import ApproveBuildRequest +from .types.cloudbuild import ArtifactResult +from .types.cloudbuild import Artifacts +from .types.cloudbuild import Build +from .types.cloudbuild import BuildApproval +from .types.cloudbuild import BuildOperationMetadata +from .types.cloudbuild import BuildOptions +from .types.cloudbuild import BuildStep +from .types.cloudbuild import BuildTrigger +from .types.cloudbuild import BuiltImage +from .types.cloudbuild import CancelBuildRequest +from .types.cloudbuild import CreateBuildRequest +from .types.cloudbuild import CreateBuildTriggerRequest +from .types.cloudbuild import CreateWorkerPoolOperationMetadata +from .types.cloudbuild import CreateWorkerPoolRequest +from .types.cloudbuild import DeleteBuildTriggerRequest +from .types.cloudbuild import DeleteWorkerPoolOperationMetadata +from .types.cloudbuild import DeleteWorkerPoolRequest +from .types.cloudbuild import FileHashes +from .types.cloudbuild import GetBuildRequest +from .types.cloudbuild import GetBuildTriggerRequest +from .types.cloudbuild import GetWorkerPoolRequest +from .types.cloudbuild import GitHubEventsConfig +from .types.cloudbuild import GitSource +from .types.cloudbuild import Hash +from .types.cloudbuild import InlineSecret +from .types.cloudbuild import ListBuildsRequest +from .types.cloudbuild import ListBuildsResponse +from .types.cloudbuild import ListBuildTriggersRequest +from .types.cloudbuild import ListBuildTriggersResponse +from .types.cloudbuild import ListWorkerPoolsRequest +from .types.cloudbuild import ListWorkerPoolsResponse +from .types.cloudbuild import PrivatePoolV1Config +from .types.cloudbuild 
import PubsubConfig +from .types.cloudbuild import PullRequestFilter +from .types.cloudbuild import PushFilter +from .types.cloudbuild import ReceiveTriggerWebhookRequest +from .types.cloudbuild import ReceiveTriggerWebhookResponse +from .types.cloudbuild import RepositoryEventConfig +from .types.cloudbuild import RepoSource +from .types.cloudbuild import Results +from .types.cloudbuild import RetryBuildRequest +from .types.cloudbuild import RunBuildTriggerRequest +from .types.cloudbuild import Secret +from .types.cloudbuild import SecretManagerSecret +from .types.cloudbuild import Secrets +from .types.cloudbuild import Source +from .types.cloudbuild import SourceProvenance +from .types.cloudbuild import StorageSource +from .types.cloudbuild import StorageSourceManifest +from .types.cloudbuild import TimeSpan +from .types.cloudbuild import UpdateBuildTriggerRequest +from .types.cloudbuild import UpdateWorkerPoolOperationMetadata +from .types.cloudbuild import UpdateWorkerPoolRequest +from .types.cloudbuild import UploadedMavenArtifact +from .types.cloudbuild import UploadedNpmPackage +from .types.cloudbuild import UploadedPythonPackage +from .types.cloudbuild import Volume +from .types.cloudbuild import WebhookConfig +from .types.cloudbuild import WorkerPool + +__all__ = ( + 'CloudBuildAsyncClient', +'ApprovalConfig', +'ApprovalResult', +'ApproveBuildRequest', +'ArtifactResult', +'Artifacts', +'Build', +'BuildApproval', +'BuildOperationMetadata', +'BuildOptions', +'BuildStep', +'BuildTrigger', +'BuiltImage', +'CancelBuildRequest', +'CloudBuildClient', +'CreateBuildRequest', +'CreateBuildTriggerRequest', +'CreateWorkerPoolOperationMetadata', +'CreateWorkerPoolRequest', +'DeleteBuildTriggerRequest', +'DeleteWorkerPoolOperationMetadata', +'DeleteWorkerPoolRequest', +'FileHashes', +'GetBuildRequest', +'GetBuildTriggerRequest', +'GetWorkerPoolRequest', +'GitHubEventsConfig', +'GitSource', +'Hash', +'InlineSecret', +'ListBuildTriggersRequest', 
+'ListBuildTriggersResponse', +'ListBuildsRequest', +'ListBuildsResponse', +'ListWorkerPoolsRequest', +'ListWorkerPoolsResponse', +'PrivatePoolV1Config', +'PubsubConfig', +'PullRequestFilter', +'PushFilter', +'ReceiveTriggerWebhookRequest', +'ReceiveTriggerWebhookResponse', +'RepoSource', +'RepositoryEventConfig', +'Results', +'RetryBuildRequest', +'RunBuildTriggerRequest', +'Secret', +'SecretManagerSecret', +'Secrets', +'Source', +'SourceProvenance', +'StorageSource', +'StorageSourceManifest', +'TimeSpan', +'UpdateBuildTriggerRequest', +'UpdateWorkerPoolOperationMetadata', +'UpdateWorkerPoolRequest', +'UploadedMavenArtifact', +'UploadedNpmPackage', +'UploadedPythonPackage', +'Volume', +'WebhookConfig', +'WorkerPool', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json new file mode 100644 index 00000000..2648fd24 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json @@ -0,0 +1,298 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.devtools.cloudbuild_v1", + "protoPackage": "google.devtools.cloudbuild.v1", + "schema": "1.0", + "services": { + "CloudBuild": { + "clients": { + "grpc": { + "libraryClient": "CloudBuildClient", + "rpcs": { + "ApproveBuild": { + "methods": [ + "approve_build" + ] + }, + "CancelBuild": { + "methods": [ + "cancel_build" + ] + }, + "CreateBuild": { + "methods": [ + "create_build" + ] + }, + "CreateBuildTrigger": { + "methods": [ + "create_build_trigger" + ] + }, + "CreateWorkerPool": { + "methods": [ + "create_worker_pool" + ] + }, + "DeleteBuildTrigger": { + "methods": [ + "delete_build_trigger" + ] + }, + "DeleteWorkerPool": { + "methods": [ + "delete_worker_pool" + ] + }, + "GetBuild": { + "methods": [ + "get_build" + ] + }, + "GetBuildTrigger": { + "methods": [ + 
"get_build_trigger" + ] + }, + "GetWorkerPool": { + "methods": [ + "get_worker_pool" + ] + }, + "ListBuildTriggers": { + "methods": [ + "list_build_triggers" + ] + }, + "ListBuilds": { + "methods": [ + "list_builds" + ] + }, + "ListWorkerPools": { + "methods": [ + "list_worker_pools" + ] + }, + "ReceiveTriggerWebhook": { + "methods": [ + "receive_trigger_webhook" + ] + }, + "RetryBuild": { + "methods": [ + "retry_build" + ] + }, + "RunBuildTrigger": { + "methods": [ + "run_build_trigger" + ] + }, + "UpdateBuildTrigger": { + "methods": [ + "update_build_trigger" + ] + }, + "UpdateWorkerPool": { + "methods": [ + "update_worker_pool" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudBuildAsyncClient", + "rpcs": { + "ApproveBuild": { + "methods": [ + "approve_build" + ] + }, + "CancelBuild": { + "methods": [ + "cancel_build" + ] + }, + "CreateBuild": { + "methods": [ + "create_build" + ] + }, + "CreateBuildTrigger": { + "methods": [ + "create_build_trigger" + ] + }, + "CreateWorkerPool": { + "methods": [ + "create_worker_pool" + ] + }, + "DeleteBuildTrigger": { + "methods": [ + "delete_build_trigger" + ] + }, + "DeleteWorkerPool": { + "methods": [ + "delete_worker_pool" + ] + }, + "GetBuild": { + "methods": [ + "get_build" + ] + }, + "GetBuildTrigger": { + "methods": [ + "get_build_trigger" + ] + }, + "GetWorkerPool": { + "methods": [ + "get_worker_pool" + ] + }, + "ListBuildTriggers": { + "methods": [ + "list_build_triggers" + ] + }, + "ListBuilds": { + "methods": [ + "list_builds" + ] + }, + "ListWorkerPools": { + "methods": [ + "list_worker_pools" + ] + }, + "ReceiveTriggerWebhook": { + "methods": [ + "receive_trigger_webhook" + ] + }, + "RetryBuild": { + "methods": [ + "retry_build" + ] + }, + "RunBuildTrigger": { + "methods": [ + "run_build_trigger" + ] + }, + "UpdateBuildTrigger": { + "methods": [ + "update_build_trigger" + ] + }, + "UpdateWorkerPool": { + "methods": [ + "update_worker_pool" + ] + } + } + }, + "rest": { + "libraryClient": 
"CloudBuildClient", + "rpcs": { + "ApproveBuild": { + "methods": [ + "approve_build" + ] + }, + "CancelBuild": { + "methods": [ + "cancel_build" + ] + }, + "CreateBuild": { + "methods": [ + "create_build" + ] + }, + "CreateBuildTrigger": { + "methods": [ + "create_build_trigger" + ] + }, + "CreateWorkerPool": { + "methods": [ + "create_worker_pool" + ] + }, + "DeleteBuildTrigger": { + "methods": [ + "delete_build_trigger" + ] + }, + "DeleteWorkerPool": { + "methods": [ + "delete_worker_pool" + ] + }, + "GetBuild": { + "methods": [ + "get_build" + ] + }, + "GetBuildTrigger": { + "methods": [ + "get_build_trigger" + ] + }, + "GetWorkerPool": { + "methods": [ + "get_worker_pool" + ] + }, + "ListBuildTriggers": { + "methods": [ + "list_build_triggers" + ] + }, + "ListBuilds": { + "methods": [ + "list_builds" + ] + }, + "ListWorkerPools": { + "methods": [ + "list_worker_pools" + ] + }, + "ReceiveTriggerWebhook": { + "methods": [ + "receive_trigger_webhook" + ] + }, + "RetryBuild": { + "methods": [ + "retry_build" + ] + }, + "RunBuildTrigger": { + "methods": [ + "run_build_trigger" + ] + }, + "UpdateBuildTrigger": { + "methods": [ + "update_build_trigger" + ] + }, + "UpdateWorkerPool": { + "methods": [ + "update_worker_pool" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py new file mode 100644 index 00000000..360a0d13 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed new file mode 100644 index 00000000..6070c14c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py new file mode 100644 index 00000000..89a37dc9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py new file mode 100644 index 00000000..b796e7cd --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CloudBuildClient +from .async_client import CloudBuildAsyncClient + +__all__ = ( + 'CloudBuildClient', + 'CloudBuildAsyncClient', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py new file mode 100644 index 00000000..00eb8e04 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -0,0 +1,2601 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport +from .client import CloudBuildClient + + +class CloudBuildAsyncClient: + """Creates and manages builds on Google Cloud Platform. 
+ + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + """ + + _client: CloudBuildClient + + DEFAULT_ENDPOINT = CloudBuildClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudBuildClient.DEFAULT_MTLS_ENDPOINT + + build_path = staticmethod(CloudBuildClient.build_path) + parse_build_path = staticmethod(CloudBuildClient.parse_build_path) + build_trigger_path = staticmethod(CloudBuildClient.build_trigger_path) + parse_build_trigger_path = staticmethod(CloudBuildClient.parse_build_trigger_path) + crypto_key_path = staticmethod(CloudBuildClient.crypto_key_path) + parse_crypto_key_path = staticmethod(CloudBuildClient.parse_crypto_key_path) + network_path = staticmethod(CloudBuildClient.network_path) + parse_network_path = staticmethod(CloudBuildClient.parse_network_path) + repository_path = staticmethod(CloudBuildClient.repository_path) + parse_repository_path = staticmethod(CloudBuildClient.parse_repository_path) + secret_version_path = staticmethod(CloudBuildClient.secret_version_path) + parse_secret_version_path = staticmethod(CloudBuildClient.parse_secret_version_path) + service_account_path = staticmethod(CloudBuildClient.service_account_path) + parse_service_account_path = staticmethod(CloudBuildClient.parse_service_account_path) + subscription_path = staticmethod(CloudBuildClient.subscription_path) + parse_subscription_path = staticmethod(CloudBuildClient.parse_subscription_path) + topic_path = staticmethod(CloudBuildClient.topic_path) + parse_topic_path = staticmethod(CloudBuildClient.parse_topic_path) + worker_pool_path = staticmethod(CloudBuildClient.worker_pool_path) + parse_worker_pool_path = staticmethod(CloudBuildClient.parse_worker_pool_path) + common_billing_account_path = 
staticmethod(CloudBuildClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CloudBuildClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CloudBuildClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudBuildClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudBuildClient.common_organization_path) + parse_common_organization_path = staticmethod(CloudBuildClient.parse_common_organization_path) + common_project_path = staticmethod(CloudBuildClient.common_project_path) + parse_common_project_path = staticmethod(CloudBuildClient.parse_common_project_path) + common_location_path = staticmethod(CloudBuildClient.common_location_path) + parse_common_location_path = staticmethod(CloudBuildClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudBuildAsyncClient: The constructed client. + """ + return CloudBuildClient.from_service_account_info.__func__(CloudBuildAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudBuildAsyncClient: The constructed client. 
+ """ + return CloudBuildClient.from_service_account_file.__func__(CloudBuildAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudBuildClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CloudBuildTransport: + """Returns the transport used by the client instance. + + Returns: + CloudBuildTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(CloudBuildClient).get_transport_class, type(CloudBuildClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudBuildTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud build client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudBuildTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudBuildClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_build(self, + request: Optional[Union[cloudbuild.CreateBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + build: Optional[cloudbuild.Build] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a build with the specified configuration. + + This method returns a long-running ``Operation``, which includes + the build ID. Pass the build ID to ``GetBuild`` to determine the + build status (such as ``SUCCESS`` or ``FAILURE``). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_create_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateBuildRequest( + project_id="project_id_value", + ) + + # Make the request + operation = client.create_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest, dict]]): + The request object. Request to create a new build. 
+ project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + build (:class:`google.cloud.devtools.cloudbuild_v1.types.Build`): + Required. Build resource to create. + This corresponds to the ``build`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, build]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.CreateBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if build is not None: + request.build = build + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_build, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_build(self, + request: Optional[Union[cloudbuild.GetBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as + ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing + information. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_get_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = await client.get_build(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest, dict]]): + The request object. Request to get a build. + project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (:class:`str`): + Required. ID of the build. + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.Build: + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. 
+ + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.GetBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_build, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("id", request.id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_builds(self, + request: Optional[Union[cloudbuild.ListBuildsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildsAsyncPager: + r"""Lists previously requested builds. + Previously requested builds may still be in-progress, or + may have finished successfully or unsuccessfully. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_list_builds(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildsRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_builds(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest, dict]]): + The request object. Request to list builds. + project_id (:class:`str`): + Required. ID of the project. 
+ This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The raw filter text to constrain the + results. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsAsyncPager: + Response including listed builds. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, filter]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.ListBuildsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_builds, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBuildsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_build(self, + request: Optional[Union[cloudbuild.CancelBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Cancels a build in progress. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_cancel_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CancelBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = await client.cancel_build(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest, dict]]): + The request object. Request to cancel an ongoing build. + project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (:class:`str`): + Required. ID of the build. + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.Build: + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. 
+ - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.CancelBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_build, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("id", request.id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def retry_build(self, + request: Optional[Union[cloudbuild.RetryBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new build based on the specified build. + + This method creates a new build using the original build + request, which may or may not result in an identical build. + + For triggered builds: + + - Triggered builds resolve to a precise revision; therefore a + retry of a triggered build will result in a build that uses + the same revision. + + For non-triggered builds that specify ``RepoSource``: + + - If the original build built from the tip of a branch, the + retried build will build from the tip of that branch, which + may not be the same revision as the original build. + - If the original build specified a commit sha or revision ID, + the retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Cloud Storage + without specifying the generation of the object, the new + build will use the current object, which may be different + from the original build source. + - If the original build pulled source from Cloud Storage and + specified the generation of the object, the new build will + attempt to use the same object, which may or may not be + available depending on the bucket's lifecycle management + settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_retry_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RetryBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + operation = client.retry_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest, dict]]): + The request object. Specifies a build to retry. + project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (:class:`str`): + Required. Build ID of the original + build. + + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. 
+ + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.RetryBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.retry_build, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("id", request.id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + async def approve_build(self, + request: Optional[Union[cloudbuild.ApproveBuildRequest, dict]] = None, + *, + name: Optional[str] = None, + approval_result: Optional[cloudbuild.ApprovalResult] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Approves or rejects a pending build. + If approved, the returned LRO will be analogous to the + LRO returned from a CreateBuild call. + + If rejected, the returned LRO will be immediately done. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_approve_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ApproveBuildRequest( + name="name_value", + ) + + # Make the request + operation = client.approve_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest, dict]]): + The request object. Request to approve or reject a + pending build. + name (:class:`str`): + Required. Name of the target build. 
For example: + "projects/{$project_id}/builds/{$build_id}" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + approval_result (:class:`google.cloud.devtools.cloudbuild_v1.types.ApprovalResult`): + Approval decision and metadata. + This corresponds to the ``approval_result`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, approval_result]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.ApproveBuildRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if approval_result is not None: + request.approval_result = approval_result + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.approve_build, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + async def create_build_trigger(self, + request: Optional[Union[cloudbuild.CreateBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger: Optional[cloudbuild.BuildTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Creates a new ``BuildTrigger``. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_create_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.CreateBuildTriggerRequest( + project_id="project_id_value", + trigger=trigger, + ) + + # Make the request + response = await client.create_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest, dict]]): + The request object. Request to create a new ``BuildTrigger``. + project_id (:class:`str`): + Required. ID of the project for which + to configure automatic builds. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (:class:`google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`): + Required. ``BuildTrigger`` to create. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.CreateBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger is not None: + request.trigger = trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_build_trigger, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_build_trigger(self, + request: Optional[Union[cloudbuild.GetBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Returns information about a ``BuildTrigger``. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_get_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + response = await client.get_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest, dict]]): + The request object. Returns the ``BuildTrigger`` with the specified ID. + project_id (:class:`str`): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (:class:`str`): + Required. Identifier (``id`` or ``name``) of the + ``BuildTrigger`` to get. + + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.GetBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_build_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger_id", request.trigger_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_build_triggers(self, + request: Optional[Union[cloudbuild.ListBuildTriggersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildTriggersAsyncPager: + r"""Lists existing ``BuildTrigger``\ s. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_list_build_triggers(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildTriggersRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_build_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest, dict]]): + The request object. Request to list existing ``BuildTriggers``. + project_id (:class:`str`): + Required. ID of the project for which + to list BuildTriggers. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersAsyncPager: + Response containing existing BuildTriggers. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.ListBuildTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_build_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBuildTriggersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_build_trigger(self, + request: Optional[Union[cloudbuild.DeleteBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_delete_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + await client.delete_build_trigger(request=request) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest, dict]]): + The request object. Request to delete a ``BuildTrigger``. + project_id (:class:`str`): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (:class:`str`): + Required. ID of the ``BuildTrigger`` to delete. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.DeleteBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_build_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger_id", request.trigger_id), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_build_trigger(self, + request: Optional[Union[cloudbuild.UpdateBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + trigger: Optional[cloudbuild.BuildTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_update_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.UpdateBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=trigger, + ) + + # Make the request + response = await client.update_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest, dict]]): + The request object. Request to update an existing ``BuildTrigger``. + project_id (:class:`str`): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (:class:`str`): + Required. ID of the ``BuildTrigger`` to update. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (:class:`google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`): + Required. ``BuildTrigger`` to update. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id, trigger]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.UpdateBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if trigger is not None: + request.trigger = trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_build_trigger, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger_id", request.trigger_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def run_build_trigger(self, + request: Optional[Union[cloudbuild.RunBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + source: Optional[cloudbuild.RepoSource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Runs a ``BuildTrigger`` at a particular source revision. + + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_run_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RunBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + operation = client.run_build_trigger(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest, dict]]): + The request object. Specifies a build trigger to run and + the source to use. 
+ project_id (:class:`str`): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (:class:`str`): + Required. ID of the trigger. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + source (:class:`google.cloud.devtools.cloudbuild_v1.types.RepoSource`): + Source to build against this trigger. + Branch and tag names cannot consist of + regular expressions. + + This corresponds to the ``source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. 
+ - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id, source]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.RunBuildTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if source is not None: + request.source = source + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_build_trigger, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger_id", request.trigger_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def receive_trigger_webhook(self, + request: Optional[Union[cloudbuild.ReceiveTriggerWebhookRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ReceiveTriggerWebhookResponse: + r"""ReceiveTriggerWebhook [Experimental] is called when the API + receives a webhook request targeted at a specific trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_receive_trigger_webhook(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ReceiveTriggerWebhookRequest( + ) + + # Make the request + response = await client.receive_trigger_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest, dict]]): + The request object. ReceiveTriggerWebhookRequest [Experimental] is the + request object accepted by the ReceiveTriggerWebhook + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse: + ReceiveTriggerWebhookResponse [Experimental] is the response object for the + ReceiveTriggerWebhook method. + + """ + # Create or coerce a protobuf request object. + request = cloudbuild.ReceiveTriggerWebhookRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.receive_trigger_webhook, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger", request.trigger), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_worker_pool(self, + request: Optional[Union[cloudbuild.CreateWorkerPoolRequest, dict]] = None, + *, + parent: Optional[str] = None, + worker_pool: Optional[cloudbuild.WorkerPool] = None, + worker_pool_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_create_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateWorkerPoolRequest( + parent="parent_value", + worker_pool_id="worker_pool_id_value", + ) + + # Make the request + operation = client.create_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest, dict]]): + The request object. Request to create a new ``WorkerPool``. + parent (:class:`str`): + Required. The parent resource where this worker pool + will be created. Format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + worker_pool (:class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool`): + Required. ``WorkerPool`` resource to create. + This corresponds to the ``worker_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + worker_pool_id (:class:`str`): + Required. Immutable. The ID to use for the + ``WorkerPool``, which will become the final component of + the resource name. + + This value should be 1-63 characters, and valid + characters are /[a-z][0-9]-/. + + This corresponds to the ``worker_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, worker_pool, worker_pool_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.CreateWorkerPoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if worker_pool is not None: + request.worker_pool = worker_pool + if worker_pool_id is not None: + request.worker_pool_id = worker_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_worker_pool, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.WorkerPool, + metadata_type=cloudbuild.CreateWorkerPoolOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_worker_pool(self, + request: Optional[Union[cloudbuild.GetWorkerPoolRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: + r"""Returns details of a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_get_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetWorkerPoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_worker_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest, dict]]): + The request object. Request to get a ``WorkerPool`` with the specified name. + name (:class:`str`): + Required. The name of the ``WorkerPool`` to retrieve. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.WorkerPool: + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. 
For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.GetWorkerPoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_worker_pool, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_worker_pool(self, + request: Optional[Union[cloudbuild.DeleteWorkerPoolRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a ``WorkerPool``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_delete_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteWorkerPoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest, dict]]): + The request object. Request to delete a ``WorkerPool``. + name (:class:`str`): + Required. The name of the ``WorkerPool`` to delete. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.DeleteWorkerPoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_worker_pool, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.DeleteWorkerPoolOperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_worker_pool(self, + request: Optional[Union[cloudbuild.UpdateWorkerPoolRequest, dict]] = None, + *, + worker_pool: Optional[cloudbuild.WorkerPool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_update_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.UpdateWorkerPoolRequest( + ) + + # Make the request + operation = client.update_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest, dict]]): + The request object. Request to update a ``WorkerPool``. + worker_pool (:class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool`): + Required. The ``WorkerPool`` to update. + + The ``name`` field is used to identify the + ``WorkerPool`` to update. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``worker_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask specifying which fields in ``worker_pool`` to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([worker_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.UpdateWorkerPoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if worker_pool is not None: + request.worker_pool = worker_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_worker_pool, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("worker_pool.name", request.worker_pool.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloudbuild.WorkerPool, + metadata_type=cloudbuild.UpdateWorkerPoolOperationMetadata, + ) + + # Done; return the response. + return response + + async def list_worker_pools(self, + request: Optional[Union[cloudbuild.ListWorkerPoolsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkerPoolsAsyncPager: + r"""Lists ``WorkerPool``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_list_worker_pools(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListWorkerPoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_worker_pools(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest, dict]]): + The request object. Request to list ``WorkerPool``\ s. + parent (:class:`str`): + Required. The parent of the collection of + ``WorkerPools``. Format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsAsyncPager: + Response containing existing WorkerPools. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudbuild.ListWorkerPoolsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_worker_pools, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkerPoolsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "CloudBuildAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CloudBuildAsyncClient", +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py new file mode 100644 index 00000000..927380bf --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -0,0 +1,2899 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudBuildGrpcTransport +from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport +from .transports.rest import CloudBuildRestTransport + + +class CloudBuildClientMeta(type): + """Metaclass for the CloudBuild client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] + _transport_registry["grpc"] = CloudBuildGrpcTransport + _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport + _transport_registry["rest"] = CloudBuildRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[CloudBuildTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudBuildClient(metaclass=CloudBuildClientMeta): + """Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudBuildClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudBuildClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudBuildTransport: + """Returns the transport used by the client instance. + + Returns: + CloudBuildTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def build_path(project: str,build: str,) -> str: + """Returns a fully-qualified build string.""" + return "projects/{project}/builds/{build}".format(project=project, build=build, ) + + @staticmethod + def parse_build_path(path: str) -> Dict[str,str]: + """Parses a build path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/builds/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def build_trigger_path(project: str,trigger: str,) -> str: + """Returns a fully-qualified build_trigger string.""" + return "projects/{project}/triggers/{trigger}".format(project=project, trigger=trigger, ) + + @staticmethod + def parse_build_trigger_path(path: str) -> Dict[str,str]: + """Parses a build_trigger path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/triggers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_path(project: str,location: str,keyring: str,key: str,) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format(project=project, location=location, keyring=keyring, key=key, ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str,str]: + """Parses a crypto_key path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def network_path(project: str,network: str,) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str,str]: + """Parses a network path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def 
repository_path(project: str,location: str,connection: str,repository: str,) -> str: + """Returns a fully-qualified repository string.""" + return "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) + + @staticmethod + def parse_repository_path(path: str) -> Dict[str,str]: + """Parses a repository path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)/repositories/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def secret_version_path(project: str,secret: str,version: str,) -> str: + """Returns a fully-qualified secret_version string.""" + return "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) + + @staticmethod + def parse_secret_version_path(path: str) -> Dict[str,str]: + """Parses a secret_version path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_account_path(project: str,service_account: str,) -> str: + """Returns a fully-qualified service_account string.""" + return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + + @staticmethod + def parse_service_account_path(path: str) -> Dict[str,str]: + """Parses a service_account path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def subscription_path(project: str,subscription: str,) -> str: + """Returns a fully-qualified subscription string.""" + return "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) + + @staticmethod + def parse_subscription_path(path: str) -> Dict[str,str]: + """Parses a 
subscription path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def topic_path(project: str,topic: str,) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str,str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def worker_pool_path(project: str,location: str,worker_pool: str,) -> str: + """Returns a fully-qualified worker_pool string.""" + return "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(project=project, location=location, worker_pool=worker_pool, ) + + @staticmethod + def parse_worker_pool_path(path: str) -> Dict[str,str]: + """Parses a worker_pool path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/workerPools/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + 
def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudBuildTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud build client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, CloudBuildTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudBuildTransport): + # transport is a CloudBuildTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_build(self, + request: Optional[Union[cloudbuild.CreateBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + build: Optional[cloudbuild.Build] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts a build with the specified configuration. + + This method returns a long-running ``Operation``, which includes + the build ID. Pass the build ID to ``GetBuild`` to determine the + build status (such as ``SUCCESS`` or ``FAILURE``). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_create_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateBuildRequest( + project_id="project_id_value", + ) + + # Make the request + operation = client.create_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest, dict]): + The request object. Request to create a new build. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + build (google.cloud.devtools.cloudbuild_v1.types.Build): + Required. Build resource to create. + This corresponds to the ``build`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. 
+ + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, build]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CreateBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CreateBuildRequest): + request = cloudbuild.CreateBuildRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if build is not None: + request.build = build + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_build] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + def get_build(self, + request: Optional[Union[cloudbuild.GetBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as + ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing + information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_get_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = client.get_build(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest, dict]): + The request object. Request to get a build. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (str): + Required. ID of the build. + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.Build: + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. 
+ - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetBuildRequest): + request = cloudbuild.GetBuildRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_build] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/builds/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_builds(self, + request: Optional[Union[cloudbuild.ListBuildsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildsPager: + r"""Lists previously requested builds. + Previously requested builds may still be in-progress, or + may have finished successfully or unsuccessfully. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_list_builds(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildsRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_builds(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest, dict]): + The request object. Request to list builds. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + The raw filter text to constrain the + results. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsPager:
+ Response including listed builds.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project_id, filter])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloudbuild.ListBuildsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.ListBuildsRequest):
+ request = cloudbuild.ListBuildsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if filter is not None:
+ request.filter = filter
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_builds]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$')
+ regex_match = routing_param_regex.match(request.parent)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBuildsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_build(self, + request: Optional[Union[cloudbuild.CancelBuildRequest, dict]] = None, + *, + project_id: Optional[str] = None, + id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Cancels a build in progress. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_cancel_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CancelBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = client.cancel_build(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest, dict]): + The request object. Request to cancel an ongoing build. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (str): + Required. ID of the build. 
+ This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.Build: + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CancelBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudbuild.CancelBuildRequest):
+ request = cloudbuild.CancelBuildRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if id is not None:
+ request.id = id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.cancel_build]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$')
+ regex_match = routing_param_regex.match(request.name)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def retry_build(self,
+ request: Optional[Union[cloudbuild.RetryBuildRequest, dict]] = None,
+ *,
+ project_id: Optional[str] = None,
+ id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates a new build based on the specified build.
+
+ This method creates a new build using the original build
+ request, which may or may not result in an identical build.
+
+ For triggered builds:
+
+ - Triggered builds resolve to a precise revision; therefore a
+ retry of a triggered build will result in a build that uses
+ the same revision.
+
+ For non-triggered builds that specify ``RepoSource``:
+
+ - If the original build built from the tip of a branch, the
+ retried build will build from the tip of that branch, which
+ may not be the same revision as the original build.
+ - If the original build specified a commit sha or revision ID, + the retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Cloud Storage + without specifying the generation of the object, the new + build will use the current object, which may be different + from the original build source. + - If the original build pulled source from Cloud Storage and + specified the generation of the object, the new build will + attempt to use the same object, which may or may not be + available depending on the bucket's lifecycle management + settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_retry_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RetryBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + operation = client.retry_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest, dict]): + The request object. Specifies a build to retry. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + id (str): + Required. Build ID of the original + build. 
+ + This corresponds to the ``id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.RetryBuildRequest. 
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.RetryBuildRequest):
+ request = cloudbuild.RetryBuildRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if id is not None:
+ request.id = id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.retry_build]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$')
+ regex_match = routing_param_regex.match(request.name)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloudbuild.Build,
+ metadata_type=cloudbuild.BuildOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def approve_build(self,
+ request: Optional[Union[cloudbuild.ApproveBuildRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ approval_result: Optional[cloudbuild.ApprovalResult] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Approves or rejects a pending build.
+ If approved, the returned LRO will be analogous to the
+ LRO returned from a CreateBuild call.
+
+ If rejected, the returned LRO will be immediately done.
+
+ ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_approve_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ApproveBuildRequest( + name="name_value", + ) + + # Make the request + operation = client.approve_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest, dict]): + The request object. Request to approve or reject a + pending build. + name (str): + Required. Name of the target build. For example: + "projects/{$project_id}/builds/{$build_id}" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + approval_result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): + Approval decision and metadata. + This corresponds to the ``approval_result`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, approval_result]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ApproveBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ApproveBuildRequest): + request = cloudbuild.ApproveBuildRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None:
+ request.name = name
+ if approval_result is not None:
+ request.approval_result = approval_result
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.approve_build]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$')
+ regex_match = routing_param_regex.match(request.name)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloudbuild.Build,
+ metadata_type=cloudbuild.BuildOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_build_trigger(self,
+ request: Optional[Union[cloudbuild.CreateBuildTriggerRequest, dict]] = None,
+ *,
+ project_id: Optional[str] = None,
+ trigger: Optional[cloudbuild.BuildTrigger] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cloudbuild.BuildTrigger:
+ r"""Creates a new ``BuildTrigger``.
+
+ This API is experimental.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_create_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.CreateBuildTriggerRequest( + project_id="project_id_value", + trigger=trigger, + ) + + # Make the request + response = client.create_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest, dict]): + The request object. Request to create a new ``BuildTrigger``. + project_id (str): + Required. ID of the project for which + to configure automatic builds. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): + Required. ``BuildTrigger`` to create. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, trigger])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloudbuild.CreateBuildTriggerRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.CreateBuildTriggerRequest):
+ request = cloudbuild.CreateBuildTriggerRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if trigger is not None:
+ request.trigger = trigger
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_build_trigger]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$')
+ regex_match = routing_param_regex.match(request.parent)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_build_trigger(self,
+ request: Optional[Union[cloudbuild.GetBuildTriggerRequest, dict]] = None,
+ *,
+ project_id: Optional[str] = None,
+ trigger_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cloudbuild.BuildTrigger:
+ r"""Returns information about a ``BuildTrigger``.
+
+ This API is experimental.
+
+ ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_get_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + response = client.get_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest, dict]): + The request object. Returns the ``BuildTrigger`` with the specified ID. + project_id (str): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. Identifier (``id`` or ``name``) of the + ``BuildTrigger`` to get. + + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project_id, trigger_id])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloudbuild.GetBuildTriggerRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.GetBuildTriggerRequest):
+ request = cloudbuild.GetBuildTriggerRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if trigger_id is not None:
+ request.trigger_id = trigger_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_build_trigger]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$')
+ regex_match = routing_param_regex.match(request.name)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response + + def list_build_triggers(self, + request: Optional[Union[cloudbuild.ListBuildTriggersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildTriggersPager: + r"""Lists existing ``BuildTrigger``\ s. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_list_build_triggers(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildTriggersRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_build_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest, dict]): + The request object. Request to list existing ``BuildTriggers``. + project_id (str): + Required. ID of the project for which + to list BuildTriggers. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+ Returns:
+ google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersPager:
+ Response containing existing BuildTriggers.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project_id])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloudbuild.ListBuildTriggersRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloudbuild.ListBuildTriggersRequest):
+ request = cloudbuild.ListBuildTriggersRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_build_triggers]
+
+ header_params = {}
+
+ routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$')
+ regex_match = routing_param_regex.match(request.parent)
+ if regex_match and regex_match.group("location"):
+ header_params["location"] = regex_match.group("location")
+
+ if header_params:
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(header_params),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListBuildTriggersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_build_trigger(self, + request: Optional[Union[cloudbuild.DeleteBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_delete_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + client.delete_build_trigger(request=request) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest, dict]): + The request object. Request to delete a ``BuildTrigger``. + project_id (str): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. ID of the ``BuildTrigger`` to delete. 
+ This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.DeleteBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.DeleteBuildTriggerRequest): + request = cloudbuild.DeleteBuildTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_build_trigger] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_build_trigger(self, + request: Optional[Union[cloudbuild.UpdateBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + trigger: Optional[cloudbuild.BuildTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_update_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.UpdateBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=trigger, + ) + + # Make the request + response = client.update_build_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest, dict]): + The request object. Request to update an existing ``BuildTrigger``. + project_id (str): + Required. ID of the project that owns + the trigger. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. 
ID of the ``BuildTrigger`` to update. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): + Required. ``BuildTrigger`` to update. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id, trigger]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.UpdateBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.UpdateBuildTriggerRequest): + request = cloudbuild.UpdateBuildTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if trigger is not None: + request.trigger = trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') + regex_match = routing_param_regex.match(request.trigger.resource_name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_build_trigger(self, + request: Optional[Union[cloudbuild.RunBuildTriggerRequest, dict]] = None, + *, + project_id: Optional[str] = None, + trigger_id: Optional[str] = None, + source: Optional[cloudbuild.RepoSource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Runs a ``BuildTrigger`` at a particular source revision. + + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_run_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RunBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + operation = client.run_build_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest, dict]): + The request object. Specifies a build trigger to run and + the source to use. + project_id (str): + Required. ID of the project. + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. ID of the trigger. + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): + Source to build against this trigger. + Branch and tag names cannot consist of + regular expressions. + + This corresponds to the ``source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.Build` + A build resource in the Cloud Build API. + + At a high level, a Build describes where to find + source code, how to build it (for example, the + builder image to run on the source), and where to + store the built artifacts. + + Fields can include the following variables, which + will be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified + by RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA + specified by RepoSource or resolved from the + specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, trigger_id, source]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.RunBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.RunBuildTriggerRequest): + request = cloudbuild.RunBuildTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if source is not None: + request.source = source + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloudbuild.Build, + metadata_type=cloudbuild.BuildOperationMetadata, + ) + + # Done; return the response. + return response + + def receive_trigger_webhook(self, + request: Optional[Union[cloudbuild.ReceiveTriggerWebhookRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ReceiveTriggerWebhookResponse: + r"""ReceiveTriggerWebhook [Experimental] is called when the API + receives a webhook request targeted at a specific trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_receive_trigger_webhook(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ReceiveTriggerWebhookRequest( + ) + + # Make the request + response = client.receive_trigger_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest, dict]): + The request object. ReceiveTriggerWebhookRequest [Experimental] is the + request object accepted by the ReceiveTriggerWebhook + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse: + ReceiveTriggerWebhookResponse [Experimental] is the response object for the + ReceiveTriggerWebhook method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ReceiveTriggerWebhookRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ReceiveTriggerWebhookRequest): + request = cloudbuild.ReceiveTriggerWebhookRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.receive_trigger_webhook] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("trigger", request.trigger), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_worker_pool(self, + request: Optional[Union[cloudbuild.CreateWorkerPoolRequest, dict]] = None, + *, + parent: Optional[str] = None, + worker_pool: Optional[cloudbuild.WorkerPool] = None, + worker_pool_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_create_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateWorkerPoolRequest( + parent="parent_value", + worker_pool_id="worker_pool_id_value", + ) + + # Make the request + operation = client.create_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest, dict]): + The request object. Request to create a new ``WorkerPool``. + parent (str): + Required. The parent resource where this worker pool + will be created. 
Format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): + Required. ``WorkerPool`` resource to create. + This corresponds to the ``worker_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + worker_pool_id (str): + Required. Immutable. The ID to use for the + ``WorkerPool``, which will become the final component of + the resource name. + + This value should be 1-63 characters, and valid + characters are /[a-z][0-9]-/. + + This corresponds to the ``worker_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, worker_pool, worker_pool_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CreateWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CreateWorkerPoolRequest): + request = cloudbuild.CreateWorkerPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if worker_pool is not None: + request.worker_pool = worker_pool + if worker_pool_id is not None: + request.worker_pool_id = worker_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloudbuild.WorkerPool, + metadata_type=cloudbuild.CreateWorkerPoolOperationMetadata, + ) + + # Done; return the response. 
+ return response + + def get_worker_pool(self, + request: Optional[Union[cloudbuild.GetWorkerPoolRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: + r"""Returns details of a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_get_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetWorkerPoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_worker_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest, dict]): + The request object. Request to get a ``WorkerPool`` with the specified name. + name (str): + Required. The name of the ``WorkerPool`` to retrieve. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.WorkerPool: + Configuration for a WorkerPool. 
+ + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetWorkerPoolRequest): + request = cloudbuild.GetWorkerPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_worker_pool(self, + request: Optional[Union[cloudbuild.DeleteWorkerPoolRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_delete_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteWorkerPoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest, dict]): + The request object. Request to delete a ``WorkerPool``. + name (str): + Required. The name of the ``WorkerPool`` to delete. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.DeleteWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.DeleteWorkerPoolRequest): + request = cloudbuild.DeleteWorkerPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_worker_pool] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$') + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.DeleteWorkerPoolOperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_worker_pool(self, + request: Optional[Union[cloudbuild.UpdateWorkerPoolRequest, dict]] = None, + *, + worker_pool: Optional[cloudbuild.WorkerPool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a ``WorkerPool``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_update_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.UpdateWorkerPoolRequest( + ) + + # Make the request + operation = client.update_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest, dict]): + The request object. Request to update a ``WorkerPool``. + worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): + Required. The ``WorkerPool`` to update. + + The ``name`` field is used to identify the + ``WorkerPool`` to update. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + + This corresponds to the ``worker_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask specifying which fields in ``worker_pool`` to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + Configuration for a WorkerPool. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a WorkerPool to run your + builds. Private WorkerPools give your builds access + to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see + [Private pools + overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([worker_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.UpdateWorkerPoolRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.UpdateWorkerPoolRequest): + request = cloudbuild.UpdateWorkerPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if worker_pool is not None: + request.worker_pool = worker_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/workerPools/[^/]+$') + regex_match = routing_param_regex.match(request.worker_pool.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloudbuild.WorkerPool, + metadata_type=cloudbuild.UpdateWorkerPoolOperationMetadata, + ) + + # Done; return the response. + return response + + def list_worker_pools(self, + request: Optional[Union[cloudbuild.ListWorkerPoolsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkerPoolsPager: + r"""Lists ``WorkerPool``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_list_worker_pools(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListWorkerPoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_worker_pools(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest, dict]): + The request object. Request to list ``WorkerPool``\ s. + parent (str): + Required. The parent of the collection of + ``WorkerPools``. Format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsPager: + Response containing existing WorkerPools. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListWorkerPoolsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListWorkerPoolsRequest): + request = cloudbuild.ListWorkerPoolsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] + + header_params = {} + + routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$') + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkerPoolsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CloudBuildClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients!
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CloudBuildClient", +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py new file mode 100644 index 00000000..f255025e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild + + +class ListBuildsPager: + """A pager for iterating through ``list_builds`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``builds`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBuilds`` requests and continue to iterate + through the ``builds`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudbuild.ListBuildsResponse], + request: cloudbuild.ListBuildsRequest, + response: cloudbuild.ListBuildsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudbuild.ListBuildsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudbuild.ListBuildsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cloudbuild.Build]: + for page in self.pages: + yield from page.builds + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBuildsAsyncPager: + """A pager for iterating through ``list_builds`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``builds`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBuilds`` requests and continue to iterate + through the ``builds`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudbuild.ListBuildsResponse]], + request: cloudbuild.ListBuildsRequest, + response: cloudbuild.ListBuildsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudbuild.ListBuildsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudbuild.ListBuildsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[cloudbuild.Build]: + async def async_generator(): + async for page in self.pages: + for response in page.builds: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBuildTriggersPager: + """A pager for iterating through ``list_build_triggers`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``triggers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBuildTriggers`` requests and continue to iterate + through the ``triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudbuild.ListBuildTriggersResponse], + request: cloudbuild.ListBuildTriggersRequest, + response: cloudbuild.ListBuildTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudbuild.ListBuildTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudbuild.ListBuildTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cloudbuild.BuildTrigger]: + for page in self.pages: + yield from page.triggers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBuildTriggersAsyncPager: + """A pager for iterating through ``list_build_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``triggers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBuildTriggers`` requests and continue to iterate + through the ``triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudbuild.ListBuildTriggersResponse]], + request: cloudbuild.ListBuildTriggersRequest, + response: cloudbuild.ListBuildTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudbuild.ListBuildTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudbuild.ListBuildTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[cloudbuild.BuildTrigger]: + async def async_generator(): + async for page in self.pages: + for response in page.triggers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListWorkerPoolsPager: + """A pager for iterating through ``list_worker_pools`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``worker_pools`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkerPools`` requests and continue to iterate + through the ``worker_pools`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., cloudbuild.ListWorkerPoolsResponse], + request: cloudbuild.ListWorkerPoolsRequest, + response: cloudbuild.ListWorkerPoolsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudbuild.ListWorkerPoolsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudbuild.ListWorkerPoolsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cloudbuild.WorkerPool]: + for page in self.pages: + yield from page.worker_pools + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListWorkerPoolsAsyncPager: + """A pager for iterating through ``list_worker_pools`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``worker_pools`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkerPools`` requests and continue to iterate + through the ``worker_pools`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudbuild.ListWorkerPoolsResponse]], + request: cloudbuild.ListWorkerPoolsRequest, + response: cloudbuild.ListWorkerPoolsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudbuild.ListWorkerPoolsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudbuild.ListWorkerPoolsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[cloudbuild.WorkerPool]: + async def async_generator(): + async for page in self.pages: + for response in page.worker_pools: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py 
b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py new file mode 100644 index 00000000..d7a78973 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudBuildTransport +from .grpc import CloudBuildGrpcTransport +from .grpc_asyncio import CloudBuildGrpcAsyncIOTransport +from .rest import CloudBuildRestTransport +from .rest import CloudBuildRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] +_transport_registry['grpc'] = CloudBuildGrpcTransport +_transport_registry['grpc_asyncio'] = CloudBuildGrpcAsyncIOTransport +_transport_registry['rest'] = CloudBuildRestTransport + +__all__ = ( + 'CloudBuildTransport', + 'CloudBuildGrpcTransport', + 'CloudBuildGrpcAsyncIOTransport', + 'CloudBuildRestTransport', + 'CloudBuildRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py new file mode 100644 index 00000000..cf5f61cc --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py @@ -0,0 +1,443 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class CloudBuildTransport(abc.ABC): + """Abstract transport class for CloudBuild.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'cloudbuild.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_build: gapic_v1.method.wrap_method( + self.create_build, + default_timeout=600.0, + client_info=client_info, + ), + self.get_build: gapic_v1.method.wrap_method( + self.get_build, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.list_builds: gapic_v1.method.wrap_method( + self.list_builds, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.cancel_build: gapic_v1.method.wrap_method( + self.cancel_build, + default_timeout=600.0, + client_info=client_info, + ), + self.retry_build: gapic_v1.method.wrap_method( + self.retry_build, + default_timeout=600.0, + client_info=client_info, + ), + self.approve_build: gapic_v1.method.wrap_method( + self.approve_build, + default_timeout=None, + client_info=client_info, + ), + self.create_build_trigger: gapic_v1.method.wrap_method( + self.create_build_trigger, + default_timeout=600.0, + client_info=client_info, + ), + self.get_build_trigger: gapic_v1.method.wrap_method( + self.get_build_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.list_build_triggers: gapic_v1.method.wrap_method( + self.list_build_triggers, + default_retry=retries.Retry( 
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.delete_build_trigger: gapic_v1.method.wrap_method( + self.delete_build_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.update_build_trigger: gapic_v1.method.wrap_method( + self.update_build_trigger, + default_timeout=600.0, + client_info=client_info, + ), + self.run_build_trigger: gapic_v1.method.wrap_method( + self.run_build_trigger, + default_timeout=600.0, + client_info=client_info, + ), + self.receive_trigger_webhook: gapic_v1.method.wrap_method( + self.receive_trigger_webhook, + default_timeout=None, + client_info=client_info, + ), + self.create_worker_pool: gapic_v1.method.wrap_method( + self.create_worker_pool, + default_timeout=600.0, + client_info=client_info, + ), + self.get_worker_pool: gapic_v1.method.wrap_method( + self.get_worker_pool, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.delete_worker_pool: gapic_v1.method.wrap_method( + self.delete_worker_pool, + default_timeout=600.0, + client_info=client_info, + ), + self.update_worker_pool: gapic_v1.method.wrap_method( + self.update_worker_pool, + default_timeout=600.0, + client_info=client_info, + ), + self.list_worker_pools: gapic_v1.method.wrap_method( + self.list_worker_pools, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + 
core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_build(self) -> Callable[ + [cloudbuild.CreateBuildRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_build(self) -> Callable[ + [cloudbuild.GetBuildRequest], + Union[ + cloudbuild.Build, + Awaitable[cloudbuild.Build] + ]]: + raise NotImplementedError() + + @property + def list_builds(self) -> Callable[ + [cloudbuild.ListBuildsRequest], + Union[ + cloudbuild.ListBuildsResponse, + Awaitable[cloudbuild.ListBuildsResponse] + ]]: + raise NotImplementedError() + + @property + def cancel_build(self) -> Callable[ + [cloudbuild.CancelBuildRequest], + Union[ + cloudbuild.Build, + Awaitable[cloudbuild.Build] + ]]: + raise NotImplementedError() + + @property + def retry_build(self) -> Callable[ + [cloudbuild.RetryBuildRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def approve_build(self) -> Callable[ + [cloudbuild.ApproveBuildRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def create_build_trigger(self) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], + Union[ + cloudbuild.BuildTrigger, + Awaitable[cloudbuild.BuildTrigger] + ]]: + raise NotImplementedError() + + @property + def get_build_trigger(self) -> Callable[ + 
[cloudbuild.GetBuildTriggerRequest], + Union[ + cloudbuild.BuildTrigger, + Awaitable[cloudbuild.BuildTrigger] + ]]: + raise NotImplementedError() + + @property + def list_build_triggers(self) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + Union[ + cloudbuild.ListBuildTriggersResponse, + Awaitable[cloudbuild.ListBuildTriggersResponse] + ]]: + raise NotImplementedError() + + @property + def delete_build_trigger(self) -> Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_build_trigger(self) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + Union[ + cloudbuild.BuildTrigger, + Awaitable[cloudbuild.BuildTrigger] + ]]: + raise NotImplementedError() + + @property + def run_build_trigger(self) -> Callable[ + [cloudbuild.RunBuildTriggerRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def receive_trigger_webhook(self) -> Callable[ + [cloudbuild.ReceiveTriggerWebhookRequest], + Union[ + cloudbuild.ReceiveTriggerWebhookResponse, + Awaitable[cloudbuild.ReceiveTriggerWebhookResponse] + ]]: + raise NotImplementedError() + + @property + def create_worker_pool(self) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_worker_pool(self) -> Callable[ + [cloudbuild.GetWorkerPoolRequest], + Union[ + cloudbuild.WorkerPool, + Awaitable[cloudbuild.WorkerPool] + ]]: + raise NotImplementedError() + + @property + def delete_worker_pool(self) -> Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_worker_pool(self) -> Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + Union[ + operations_pb2.Operation, + 
Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_worker_pools(self) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], + Union[ + cloudbuild.ListWorkerPoolsResponse, + Awaitable[cloudbuild.ListWorkerPoolsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'CloudBuildTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py new file mode 100644 index 00000000..08b246b2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py @@ -0,0 +1,793 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO + + +class CloudBuildGrpcTransport(CloudBuildTransport): + """gRPC backend transport for CloudBuild. + + Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self._operations_client + + @property + def create_build(self) -> Callable[ + [cloudbuild.CreateBuildRequest], + operations_pb2.Operation]: + r"""Return a callable for the create build method over gRPC. + + Starts a build with the specified configuration. + + This method returns a long-running ``Operation``, which includes + the build ID. Pass the build ID to ``GetBuild`` to determine the + build status (such as ``SUCCESS`` or ``FAILURE``). + + Returns: + Callable[[~.CreateBuildRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_build' not in self._stubs: + self._stubs['create_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', + request_serializer=cloudbuild.CreateBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_build'] + + @property + def get_build(self) -> Callable[ + [cloudbuild.GetBuildRequest], + cloudbuild.Build]: + r"""Return a callable for the get build method over gRPC. + + Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as + ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing + information. + + Returns: + Callable[[~.GetBuildRequest], + ~.Build]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_build' not in self._stubs: + self._stubs['get_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', + request_serializer=cloudbuild.GetBuildRequest.serialize, + response_deserializer=cloudbuild.Build.deserialize, + ) + return self._stubs['get_build'] + + @property + def list_builds(self) -> Callable[ + [cloudbuild.ListBuildsRequest], + cloudbuild.ListBuildsResponse]: + r"""Return a callable for the list builds method over gRPC. + + Lists previously requested builds. + Previously requested builds may still be in-progress, or + may have finished successfully or unsuccessfully. + + Returns: + Callable[[~.ListBuildsRequest], + ~.ListBuildsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_builds' not in self._stubs: + self._stubs['list_builds'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', + request_serializer=cloudbuild.ListBuildsRequest.serialize, + response_deserializer=cloudbuild.ListBuildsResponse.deserialize, + ) + return self._stubs['list_builds'] + + @property + def cancel_build(self) -> Callable[ + [cloudbuild.CancelBuildRequest], + cloudbuild.Build]: + r"""Return a callable for the cancel build method over gRPC. + + Cancels a build in progress. + + Returns: + Callable[[~.CancelBuildRequest], + ~.Build]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'cancel_build' not in self._stubs: + self._stubs['cancel_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', + request_serializer=cloudbuild.CancelBuildRequest.serialize, + response_deserializer=cloudbuild.Build.deserialize, + ) + return self._stubs['cancel_build'] + + @property + def retry_build(self) -> Callable[ + [cloudbuild.RetryBuildRequest], + operations_pb2.Operation]: + r"""Return a callable for the retry build method over gRPC. + + Creates a new build based on the specified build. + + This method creates a new build using the original build + request, which may or may not result in an identical build. + + For triggered builds: + + - Triggered builds resolve to a precise revision; therefore a + retry of a triggered build will result in a build that uses + the same revision. + + For non-triggered builds that specify ``RepoSource``: + + - If the original build built from the tip of a branch, the + retried build will build from the tip of that branch, which + may not be the same revision as the original build. + - If the original build specified a commit sha or revision ID, + the retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Cloud Storage + without specifying the generation of the object, the new + build will use the current object, which may be different + from the original build source. + - If the original build pulled source from Cloud Storage and + specified the generation of the object, the new build will + attempt to use the same object, which may or may not be + available depending on the bucket's lifecycle management + settings. + + Returns: + Callable[[~.RetryBuildRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'retry_build' not in self._stubs: + self._stubs['retry_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', + request_serializer=cloudbuild.RetryBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['retry_build'] + + @property + def approve_build(self) -> Callable[ + [cloudbuild.ApproveBuildRequest], + operations_pb2.Operation]: + r"""Return a callable for the approve build method over gRPC. + + Approves or rejects a pending build. + If approved, the returned LRO will be analogous to the + LRO returned from a CreateBuild call. + + If rejected, the returned LRO will be immediately done. + + Returns: + Callable[[~.ApproveBuildRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'approve_build' not in self._stubs: + self._stubs['approve_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ApproveBuild', + request_serializer=cloudbuild.ApproveBuildRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['approve_build'] + + @property + def create_build_trigger(self) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], + cloudbuild.BuildTrigger]: + r"""Return a callable for the create build trigger method over gRPC. + + Creates a new ``BuildTrigger``. + + This API is experimental. + + Returns: + Callable[[~.CreateBuildTriggerRequest], + ~.BuildTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_build_trigger' not in self._stubs: + self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', + request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['create_build_trigger'] + + @property + def get_build_trigger(self) -> Callable[ + [cloudbuild.GetBuildTriggerRequest], + cloudbuild.BuildTrigger]: + r"""Return a callable for the get build trigger method over gRPC. + + Returns information about a ``BuildTrigger``. + + This API is experimental. + + Returns: + Callable[[~.GetBuildTriggerRequest], + ~.BuildTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_build_trigger' not in self._stubs: + self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', + request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['get_build_trigger'] + + @property + def list_build_triggers(self) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + cloudbuild.ListBuildTriggersResponse]: + r"""Return a callable for the list build triggers method over gRPC. + + Lists existing ``BuildTrigger``\ s. + + This API is experimental. + + Returns: + Callable[[~.ListBuildTriggersRequest], + ~.ListBuildTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_build_triggers' not in self._stubs: + self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', + request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, + response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, + ) + return self._stubs['list_build_triggers'] + + @property + def delete_build_trigger(self) -> Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete build trigger method over gRPC. + + Deletes a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Returns: + Callable[[~.DeleteBuildTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_build_trigger' not in self._stubs: + self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', + request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_build_trigger'] + + @property + def update_build_trigger(self) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + cloudbuild.BuildTrigger]: + r"""Return a callable for the update build trigger method over gRPC. + + Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Returns: + Callable[[~.UpdateBuildTriggerRequest], + ~.BuildTrigger]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_build_trigger' not in self._stubs: + self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', + request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, + response_deserializer=cloudbuild.BuildTrigger.deserialize, + ) + return self._stubs['update_build_trigger'] + + @property + def run_build_trigger(self) -> Callable[ + [cloudbuild.RunBuildTriggerRequest], + operations_pb2.Operation]: + r"""Return a callable for the run build trigger method over gRPC. + + Runs a ``BuildTrigger`` at a particular source revision. + + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + + Returns: + Callable[[~.RunBuildTriggerRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_build_trigger' not in self._stubs: + self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', + request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['run_build_trigger'] + + @property + def receive_trigger_webhook(self) -> Callable[ + [cloudbuild.ReceiveTriggerWebhookRequest], + cloudbuild.ReceiveTriggerWebhookResponse]: + r"""Return a callable for the receive trigger webhook method over gRPC. + + ReceiveTriggerWebhook [Experimental] is called when the API + receives a webhook request targeted at a specific trigger. + + Returns: + Callable[[~.ReceiveTriggerWebhookRequest], + ~.ReceiveTriggerWebhookResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'receive_trigger_webhook' not in self._stubs: + self._stubs['receive_trigger_webhook'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ReceiveTriggerWebhook', + request_serializer=cloudbuild.ReceiveTriggerWebhookRequest.serialize, + response_deserializer=cloudbuild.ReceiveTriggerWebhookResponse.deserialize, + ) + return self._stubs['receive_trigger_webhook'] + + @property + def create_worker_pool(self) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], + operations_pb2.Operation]: + r"""Return a callable for the create worker pool method over gRPC. + + Creates a ``WorkerPool``. + + Returns: + Callable[[~.CreateWorkerPoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        # NOTE(review): stubs are created lazily and cached in self._stubs,
        # keyed by method name, so each RPC stub is built at most once.
        if 'create_worker_pool' not in self._stubs:
            self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool',
                request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['create_worker_pool']

    @property
    def get_worker_pool(self) -> Callable[
            [cloudbuild.GetWorkerPoolRequest],
            cloudbuild.WorkerPool]:
        r"""Return a callable for the get worker pool method over gRPC.

        Returns details of a ``WorkerPool``.

        Returns:
            Callable[[~.GetWorkerPoolRequest],
                    ~.WorkerPool]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_worker_pool' not in self._stubs:
            self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool',
                request_serializer=cloudbuild.GetWorkerPoolRequest.serialize,
                response_deserializer=cloudbuild.WorkerPool.deserialize,
            )
        return self._stubs['get_worker_pool']

    @property
    def delete_worker_pool(self) -> Callable[
            [cloudbuild.DeleteWorkerPoolRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the delete worker pool method over gRPC.

        Deletes a ``WorkerPool``.

        Returns:
            Callable[[~.DeleteWorkerPoolRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_worker_pool' not in self._stubs:
            self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool',
                request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['delete_worker_pool']

    @property
    def update_worker_pool(self) -> Callable[
            [cloudbuild.UpdateWorkerPoolRequest],
            operations_pb2.Operation]:
        r"""Return a callable for the update worker pool method over gRPC.

        Updates a ``WorkerPool``.

        Returns:
            Callable[[~.UpdateWorkerPoolRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_worker_pool' not in self._stubs:
            self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool',
                request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['update_worker_pool']

    @property
    def list_worker_pools(self) -> Callable[
            [cloudbuild.ListWorkerPoolsRequest],
            cloudbuild.ListWorkerPoolsResponse]:
        r"""Return a callable for the list worker pools method over gRPC.

        Lists ``WorkerPool``\ s.

        Returns:
            Callable[[~.ListWorkerPoolsRequest],
                    ~.ListWorkerPoolsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_worker_pools' not in self._stubs:
            self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools',
                request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize,
                response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize,
            )
        return self._stubs['list_worker_pools']

    def close(self):
        """Close the underlying gRPC channel (synchronous; returns None)."""
        self.grpc_channel.close()

    @property
    def kind(self) -> str:
        # Transport discriminator used by the client factory.
        return "grpc"


__all__ = (
    'CloudBuildGrpcTransport',
)
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore

import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.cloud.devtools.cloudbuild_v1.types import cloudbuild
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO
from .grpc import CloudBuildGrpcTransport


# NOTE(review): this module is auto-generated (Owl Bot / gapic-generator-python
# per the commit metadata); changes should be made in the generator, not here.
class CloudBuildGrpcAsyncIOTransport(CloudBuildTransport):
    """gRPC AsyncIO backend transport for CloudBuild.

    Creates and manages builds on Google Cloud Platform.

    The main concept used by this API is a ``Build``, which describes
    the location of the source to build, how to build the source, and
    where to store the built artifacts, if any.

    A user can list previously-requested builds or get builds by their
    ID to determine the status of the build.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # The active AsyncIO channel; set in __init__.
    _grpc_channel: aio.Channel
    # Per-instance cache of RPC stubs, keyed by method name; populated
    # lazily by the stub properties below.
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(cls,
                       host: str = 'cloudbuild.googleapis.com',
                       credentials: Optional[ga_credentials.Credentials] = None,
                       credentials_file: Optional[str] = None,
                       scopes: Optional[Sequence[str]] = None,
                       quota_project_id: Optional[str] = None,
                       **kwargs) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    def __init__(self, *,
                 host: str = 'cloudbuild.googleapis.com',
                 credentials: Optional[ga_credentials.Credentials] = None,
                 credentials_file: Optional[str] = None,
                 scopes: Optional[Sequence[str]] = None,
                 channel: Optional[aio.Channel] = None,
                 api_mtls_endpoint: Optional[str] = None,
                 client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
                 ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
                 client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
                 quota_project_id: Optional[str] = None,
                 client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
                 always_use_jwt_access: Optional[bool] = False,
                 api_audience: Optional[str] = None,
                 ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsAsyncClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsAsyncClient(
                self.grpc_channel
            )

        # Return the client from cache.
        return self._operations_client

    @property
    def create_build(self) -> Callable[
            [cloudbuild.CreateBuildRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the create build method over gRPC.

        Starts a build with the specified configuration.

        This method returns a long-running ``Operation``, which includes
        the build ID. Pass the build ID to ``GetBuild`` to determine the
        build status (such as ``SUCCESS`` or ``FAILURE``).

        Returns:
            Callable[[~.CreateBuildRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_build' not in self._stubs:
            self._stubs['create_build'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild',
                request_serializer=cloudbuild.CreateBuildRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['create_build']

    @property
    def get_build(self) -> Callable[
            [cloudbuild.GetBuildRequest],
            Awaitable[cloudbuild.Build]]:
        r"""Return a callable for the get build method over gRPC.

        Returns information about a previously requested build.

        The ``Build`` that is returned includes its status (such as
        ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing
        information.

        Returns:
            Callable[[~.GetBuildRequest],
                    Awaitable[~.Build]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_build' not in self._stubs:
            self._stubs['get_build'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild',
                request_serializer=cloudbuild.GetBuildRequest.serialize,
                response_deserializer=cloudbuild.Build.deserialize,
            )
        return self._stubs['get_build']

    @property
    def list_builds(self) -> Callable[
            [cloudbuild.ListBuildsRequest],
            Awaitable[cloudbuild.ListBuildsResponse]]:
        r"""Return a callable for the list builds method over gRPC.

        Lists previously requested builds.
        Previously requested builds may still be in-progress, or
        may have finished successfully or unsuccessfully.

        Returns:
            Callable[[~.ListBuildsRequest],
                    Awaitable[~.ListBuildsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_builds' not in self._stubs:
            self._stubs['list_builds'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds',
                request_serializer=cloudbuild.ListBuildsRequest.serialize,
                response_deserializer=cloudbuild.ListBuildsResponse.deserialize,
            )
        return self._stubs['list_builds']

    @property
    def cancel_build(self) -> Callable[
            [cloudbuild.CancelBuildRequest],
            Awaitable[cloudbuild.Build]]:
        r"""Return a callable for the cancel build method over gRPC.

        Cancels a build in progress.

        Returns:
            Callable[[~.CancelBuildRequest],
                    Awaitable[~.Build]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'cancel_build' not in self._stubs:
            self._stubs['cancel_build'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild',
                request_serializer=cloudbuild.CancelBuildRequest.serialize,
                response_deserializer=cloudbuild.Build.deserialize,
            )
        return self._stubs['cancel_build']

    @property
    def retry_build(self) -> Callable[
            [cloudbuild.RetryBuildRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the retry build method over gRPC.

        Creates a new build based on the specified build.

        This method creates a new build using the original build
        request, which may or may not result in an identical build.

        For triggered builds:

        -  Triggered builds resolve to a precise revision; therefore a
           retry of a triggered build will result in a build that uses
           the same revision.

        For non-triggered builds that specify ``RepoSource``:

        -  If the original build built from the tip of a branch, the
           retried build will build from the tip of that branch, which
           may not be the same revision as the original build.
        -  If the original build specified a commit sha or revision ID,
           the retried build will use the identical source.

        For builds that specify ``StorageSource``:

        -  If the original build pulled source from Cloud Storage
           without specifying the generation of the object, the new
           build will use the current object, which may be different
           from the original build source.
        -  If the original build pulled source from Cloud Storage and
           specified the generation of the object, the new build will
           attempt to use the same object, which may or may not be
           available depending on the bucket's lifecycle management
           settings.

        Returns:
            Callable[[~.RetryBuildRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'retry_build' not in self._stubs:
            self._stubs['retry_build'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild',
                request_serializer=cloudbuild.RetryBuildRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['retry_build']

    @property
    def approve_build(self) -> Callable[
            [cloudbuild.ApproveBuildRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the approve build method over gRPC.

        Approves or rejects a pending build.
        If approved, the returned LRO will be analogous to the
        LRO returned from a CreateBuild call.

        If rejected, the returned LRO will be immediately done.

        Returns:
            Callable[[~.ApproveBuildRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'approve_build' not in self._stubs:
            self._stubs['approve_build'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/ApproveBuild',
                request_serializer=cloudbuild.ApproveBuildRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['approve_build']

    @property
    def create_build_trigger(self) -> Callable[
            [cloudbuild.CreateBuildTriggerRequest],
            Awaitable[cloudbuild.BuildTrigger]]:
        r"""Return a callable for the create build trigger method over gRPC.

        Creates a new ``BuildTrigger``.

        This API is experimental.

        Returns:
            Callable[[~.CreateBuildTriggerRequest],
                    Awaitable[~.BuildTrigger]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_build_trigger' not in self._stubs:
            self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger',
                request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize,
                response_deserializer=cloudbuild.BuildTrigger.deserialize,
            )
        return self._stubs['create_build_trigger']

    @property
    def get_build_trigger(self) -> Callable[
            [cloudbuild.GetBuildTriggerRequest],
            Awaitable[cloudbuild.BuildTrigger]]:
        r"""Return a callable for the get build trigger method over gRPC.

        Returns information about a ``BuildTrigger``.

        This API is experimental.

        Returns:
            Callable[[~.GetBuildTriggerRequest],
                    Awaitable[~.BuildTrigger]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_build_trigger' not in self._stubs:
            self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger',
                request_serializer=cloudbuild.GetBuildTriggerRequest.serialize,
                response_deserializer=cloudbuild.BuildTrigger.deserialize,
            )
        return self._stubs['get_build_trigger']

    @property
    def list_build_triggers(self) -> Callable[
            [cloudbuild.ListBuildTriggersRequest],
            Awaitable[cloudbuild.ListBuildTriggersResponse]]:
        r"""Return a callable for the list build triggers method over gRPC.

        Lists existing ``BuildTrigger``\ s.

        This API is experimental.

        Returns:
            Callable[[~.ListBuildTriggersRequest],
                    Awaitable[~.ListBuildTriggersResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_build_triggers' not in self._stubs:
            self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers',
                request_serializer=cloudbuild.ListBuildTriggersRequest.serialize,
                response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize,
            )
        return self._stubs['list_build_triggers']

    @property
    def delete_build_trigger(self) -> Callable[
            [cloudbuild.DeleteBuildTriggerRequest],
            Awaitable[empty_pb2.Empty]]:
        r"""Return a callable for the delete build trigger method over gRPC.

        Deletes a ``BuildTrigger`` by its project ID and trigger ID.

        This API is experimental.

        Returns:
            Callable[[~.DeleteBuildTriggerRequest],
                    Awaitable[~.Empty]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_build_trigger' not in self._stubs:
            self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger',
                request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['delete_build_trigger']

    @property
    def update_build_trigger(self) -> Callable[
            [cloudbuild.UpdateBuildTriggerRequest],
            Awaitable[cloudbuild.BuildTrigger]]:
        r"""Return a callable for the update build trigger method over gRPC.

        Updates a ``BuildTrigger`` by its project ID and trigger ID.

        This API is experimental.

        Returns:
            Callable[[~.UpdateBuildTriggerRequest],
                    Awaitable[~.BuildTrigger]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_build_trigger' not in self._stubs:
            self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger',
                request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize,
                response_deserializer=cloudbuild.BuildTrigger.deserialize,
            )
        return self._stubs['update_build_trigger']

    @property
    def run_build_trigger(self) -> Callable[
            [cloudbuild.RunBuildTriggerRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the run build trigger method over gRPC.

        Runs a ``BuildTrigger`` at a particular source revision.

        To run a regional or global trigger, use the POST request that
        includes the location endpoint in the path (ex.
        v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run).
        The POST request that does not include the location endpoint in
        the path can only be used when running global triggers.

        Returns:
            Callable[[~.RunBuildTriggerRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'run_build_trigger' not in self._stubs:
            self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger',
                request_serializer=cloudbuild.RunBuildTriggerRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['run_build_trigger']

    @property
    def receive_trigger_webhook(self) -> Callable[
            [cloudbuild.ReceiveTriggerWebhookRequest],
            Awaitable[cloudbuild.ReceiveTriggerWebhookResponse]]:
        r"""Return a callable for the receive trigger webhook method over gRPC.

        ReceiveTriggerWebhook [Experimental] is called when the API
        receives a webhook request targeted at a specific trigger.

        Returns:
            Callable[[~.ReceiveTriggerWebhookRequest],
                    Awaitable[~.ReceiveTriggerWebhookResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'receive_trigger_webhook' not in self._stubs:
            self._stubs['receive_trigger_webhook'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/ReceiveTriggerWebhook',
                request_serializer=cloudbuild.ReceiveTriggerWebhookRequest.serialize,
                response_deserializer=cloudbuild.ReceiveTriggerWebhookResponse.deserialize,
            )
        return self._stubs['receive_trigger_webhook']

    @property
    def create_worker_pool(self) -> Callable[
            [cloudbuild.CreateWorkerPoolRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the create worker pool method over gRPC.

        Creates a ``WorkerPool``.

        Returns:
            Callable[[~.CreateWorkerPoolRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_worker_pool' not in self._stubs:
            self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool',
                request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['create_worker_pool']

    @property
    def get_worker_pool(self) -> Callable[
            [cloudbuild.GetWorkerPoolRequest],
            Awaitable[cloudbuild.WorkerPool]]:
        r"""Return a callable for the get worker pool method over gRPC.

        Returns details of a ``WorkerPool``.

        Returns:
            Callable[[~.GetWorkerPoolRequest],
                    Awaitable[~.WorkerPool]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_worker_pool' not in self._stubs:
            self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool',
                request_serializer=cloudbuild.GetWorkerPoolRequest.serialize,
                response_deserializer=cloudbuild.WorkerPool.deserialize,
            )
        return self._stubs['get_worker_pool']

    @property
    def delete_worker_pool(self) -> Callable[
            [cloudbuild.DeleteWorkerPoolRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the delete worker pool method over gRPC.

        Deletes a ``WorkerPool``.

        Returns:
            Callable[[~.DeleteWorkerPoolRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_worker_pool' not in self._stubs:
            self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool',
                request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['delete_worker_pool']

    @property
    def update_worker_pool(self) -> Callable[
            [cloudbuild.UpdateWorkerPoolRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the update worker pool method over gRPC.

        Updates a ``WorkerPool``.

        Returns:
            Callable[[~.UpdateWorkerPoolRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_worker_pool' not in self._stubs:
            self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool',
                request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['update_worker_pool']

    @property
    def list_worker_pools(self) -> Callable[
            [cloudbuild.ListWorkerPoolsRequest],
            Awaitable[cloudbuild.ListWorkerPoolsResponse]]:
        r"""Return a callable for the list worker pools method over gRPC.

        Lists ``WorkerPool``\ s.

        Returns:
            Callable[[~.ListWorkerPoolsRequest],
                    Awaitable[~.ListWorkerPoolsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_worker_pools' not in self._stubs:
            self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary(
                '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools',
                request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize,
                response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize,
            )
        return self._stubs['list_worker_pools']

    def close(self):
        """Close the underlying channel.

        NOTE(review): unlike the sync transport, this returns the value of
        ``aio.Channel.close()`` — per the grpc AsyncIO API that is a
        coroutine, so callers are expected to await the result.
        """
        return self.grpc_channel.close()


__all__ = (
    'CloudBuildGrpcAsyncIOTransport',
)
#

from google.auth.transport.requests import AuthorizedSession  # type: ignore
import json  # type: ignore
import grpc  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
from google.api_core import path_template
from google.api_core import gapic_v1

from google.protobuf import json_format
from google.api_core import operations_v1
from requests import __version__ as requests_version
import dataclasses
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

# Compatibility shim: older google-api-core releases presumably lack
# ``gapic_v1.method._MethodDefault`` (hence the AttributeError fallback) —
# in that case any sentinel object is accepted for the retry default.
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


from google.cloud.devtools.cloudbuild_v1.types import cloudbuild
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore

from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO


# Client info for the REST transport: reports the ``requests`` library
# version instead of a gRPC version in the user-agent string.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)
code-block:: python + class MyCustomCloudBuildInterceptor(CloudBuildRestInterceptor): + def pre_approve_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_approve_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_get_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_builds(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_builds(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_build_triggers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_build_triggers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_worker_pools(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_worker_pools(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_receive_trigger_webhook(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_receive_trigger_webhook(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retry_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retry_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_build_trigger(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_update_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudBuildRestTransport(interceptor=MyCustomCloudBuildInterceptor()) + client = CloudBuildClient(transport=transport) + + + """ + def pre_approve_build(self, request: cloudbuild.ApproveBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ApproveBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for approve_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_approve_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for approve_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_cancel_build(self, request: cloudbuild.CancelBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CancelBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_cancel_build(self, response: cloudbuild.Build) -> cloudbuild.Build: + """Post-rpc interceptor for cancel_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + def pre_create_build(self, request: cloudbuild.CreateBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_create_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_create_build_trigger(self, request: cloudbuild.CreateBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_create_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for create_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_create_worker_pool(self, request: cloudbuild.CreateWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_create_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_delete_build_trigger(self, request: cloudbuild.DeleteBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.DeleteBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def pre_delete_worker_pool(self, request: cloudbuild.DeleteWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.DeleteWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_delete_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_get_build(self, request: cloudbuild.GetBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_get_build(self, response: cloudbuild.Build) -> cloudbuild.Build: + """Post-rpc interceptor for get_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_get_build_trigger(self, request: cloudbuild.GetBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_get_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for get_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_get_worker_pool(self, request: cloudbuild.GetWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_get_worker_pool(self, response: cloudbuild.WorkerPool) -> cloudbuild.WorkerPool: + """Post-rpc interceptor for get_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + def pre_list_builds(self, request: cloudbuild.ListBuildsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListBuildsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_builds + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_list_builds(self, response: cloudbuild.ListBuildsResponse) -> cloudbuild.ListBuildsResponse: + """Post-rpc interceptor for list_builds + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_list_build_triggers(self, request: cloudbuild.ListBuildTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListBuildTriggersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_build_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_list_build_triggers(self, response: cloudbuild.ListBuildTriggersResponse) -> cloudbuild.ListBuildTriggersResponse: + """Post-rpc interceptor for list_build_triggers + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_list_worker_pools(self, request: cloudbuild.ListWorkerPoolsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListWorkerPoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_worker_pools + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_list_worker_pools(self, response: cloudbuild.ListWorkerPoolsResponse) -> cloudbuild.ListWorkerPoolsResponse: + """Post-rpc interceptor for list_worker_pools + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_receive_trigger_webhook(self, request: cloudbuild.ReceiveTriggerWebhookRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ReceiveTriggerWebhookRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for receive_trigger_webhook + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_receive_trigger_webhook(self, response: cloudbuild.ReceiveTriggerWebhookResponse) -> cloudbuild.ReceiveTriggerWebhookResponse: + """Post-rpc interceptor for receive_trigger_webhook + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_retry_build(self, request: cloudbuild.RetryBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.RetryBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retry_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_retry_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for retry_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + def pre_run_build_trigger(self, request: cloudbuild.RunBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.RunBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_run_build_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for run_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_update_build_trigger(self, request: cloudbuild.UpdateBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.UpdateBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_update_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for update_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + def pre_update_worker_pool(self, request: cloudbuild.UpdateWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.UpdateWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_update_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudBuildRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudBuildRestInterceptor + + +class CloudBuildRestTransport(CloudBuildTransport): + """REST backend transport for CloudBuild. + + Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[CloudBuildRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CloudBuildRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=operations/**}:cancel', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=operations/**}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _ApproveBuild(CloudBuildRestStub): + def __hash__(self): + return hash("ApproveBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.ApproveBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the approve build method over HTTP. + + Args: + request (~.cloudbuild.ApproveBuildRequest): + The request object. Request to approve or reject a + pending build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/builds/*}:approve', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/builds/*}:approve', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_approve_build(request, metadata) + pb_request = cloudbuild.ApproveBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_approve_build(resp) + return resp + + class _CancelBuild(CloudBuildRestStub): + def __hash__(self): + return hash("CancelBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.CancelBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.Build: + r"""Call the cancel build method over HTTP. + + Args: + request (~.cloudbuild.CancelBuildRequest): + The request object. Request to cancel an ongoing build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.Build: + A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find + source code, how to build it (for example, the builder + image to run on the source), and where to store the + built artifacts. + + Fields can include the following variables, which will + be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified by + RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. 
+ - $REVISION_ID or $COMMIT_SHA: the commit SHA specified + by RepoSource or resolved from the specified branch + or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/builds/{id}:cancel', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/builds/*}:cancel', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_cancel_build(request, metadata) + pb_request = cloudbuild.CancelBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.Build() + pb_resp = cloudbuild.Build.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_build(resp) + return resp + + class _CreateBuild(CloudBuildRestStub): + def __hash__(self): + return hash("CreateBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.CreateBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create build method over HTTP. + + Args: + request (~.cloudbuild.CreateBuildRequest): + The request object. Request to create a new build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/builds', + 'body': 'build', + }, +{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/builds', + 'body': 'build', + }, + ] + request, metadata = self._interceptor.pre_create_build(request, metadata) + pb_request = cloudbuild.CreateBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_build(resp) + return resp + + class _CreateBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("CreateBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.CreateBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.BuildTrigger: + r"""Call the create build trigger method over HTTP. + + Args: + request (~.cloudbuild.CreateBuildTriggerRequest): + The request object. Request to create a new ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/triggers', + 'body': 'trigger', + }, +{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_create_build_trigger(request, metadata) + pb_request = cloudbuild.CreateBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.BuildTrigger() + pb_resp = cloudbuild.BuildTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_build_trigger(resp) + return resp + + class _CreateWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("CreateWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "workerPoolId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.CreateWorkerPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create worker pool method over HTTP. + + Args: + request (~.cloudbuild.CreateWorkerPoolRequest): + The request object. Request to create a new ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/workerPools', + 'body': 'worker_pool', + }, + ] + request, metadata = self._interceptor.pre_create_worker_pool(request, metadata) + pb_request = cloudbuild.CreateWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_worker_pool(resp) + return resp + + class _DeleteBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("DeleteBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.DeleteBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete build trigger method over HTTP. + + Args: + request (~.cloudbuild.DeleteBuildTriggerRequest): + The request object. Request to delete a ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', + }, +{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_build_trigger(request, metadata) + pb_request = cloudbuild.DeleteBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("DeleteWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.DeleteWorkerPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete worker pool method over HTTP. 
+ + Args: + request (~.cloudbuild.DeleteWorkerPoolRequest): + The request object. Request to delete a ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/workerPools/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_worker_pool(request, metadata) + pb_request = cloudbuild.DeleteWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_worker_pool(resp) + return resp + + class _GetBuild(CloudBuildRestStub): + def __hash__(self): + return hash("GetBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.GetBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.Build: + r"""Call the get build method over HTTP. + + Args: + request (~.cloudbuild.GetBuildRequest): + The request object. Request to get a build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.Build: + A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find + source code, how to build it (for example, the builder + image to run on the source), and where to store the + built artifacts. + + Fields can include the following variables, which will + be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified by + RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. 
+ - $REVISION_ID or $COMMIT_SHA: the commit SHA specified + by RepoSource or resolved from the specified branch + or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/projects/{project_id}/builds/{id}', + }, +{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/builds/*}', + }, + ] + request, metadata = self._interceptor.pre_get_build(request, metadata) + pb_request = cloudbuild.GetBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.Build() + pb_resp = cloudbuild.Build.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_build(resp) + return resp + + class _GetBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("GetBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.GetBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.BuildTrigger: + r"""Call the get build trigger method over HTTP. + + Args: + request (~.cloudbuild.GetBuildTriggerRequest): + The request object. Returns the ``BuildTrigger`` with the specified ID. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', + }, +{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_build_trigger(request, metadata) + pb_request = cloudbuild.GetBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.BuildTrigger() + pb_resp = cloudbuild.BuildTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_build_trigger(resp) + return resp + + class _GetWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("GetWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.GetWorkerPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.WorkerPool: + r"""Call the get worker pool method over HTTP. + + Args: + request (~.cloudbuild.GetWorkerPoolRequest): + The request object. Request to get a ``WorkerPool`` with the specified name. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.WorkerPool: + Configuration for a ``WorkerPool``. + + Cloud Build owns and maintains a pool of workers for + general use and have no access to a project's private + network. By default, builds submitted to Cloud Build + will use a worker from this pool. + + If your build needs access to resources on a private + network, create and use a ``WorkerPool`` to run your + builds. Private ``WorkerPool``\ s give your builds + access to any single VPC network that you administer, + including any on-prem resources connected to that VPC + network. For an overview of private pools, see `Private + pools + overview `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/workerPools/*}', + }, + ] + request, metadata = self._interceptor.pre_get_worker_pool(request, metadata) + pb_request = cloudbuild.GetWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.WorkerPool() + pb_resp = cloudbuild.WorkerPool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_worker_pool(resp) + return resp + + class _ListBuilds(CloudBuildRestStub): + def __hash__(self): + return hash("ListBuilds") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.ListBuildsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.ListBuildsResponse: + r"""Call the list builds method over HTTP. + + Args: + request (~.cloudbuild.ListBuildsRequest): + The request object. Request to list builds. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListBuildsResponse: + Response including listed builds. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/projects/{project_id}/builds', + }, +{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/builds', + }, + ] + request, metadata = self._interceptor.pre_list_builds(request, metadata) + pb_request = cloudbuild.ListBuildsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListBuildsResponse() + pb_resp = cloudbuild.ListBuildsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_builds(resp) + return resp + + class _ListBuildTriggers(CloudBuildRestStub): + def __hash__(self): + return hash("ListBuildTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.ListBuildTriggersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.ListBuildTriggersResponse: + r"""Call the list build triggers method over HTTP. + + Args: + request (~.cloudbuild.ListBuildTriggersRequest): + The request object. Request to list existing ``BuildTriggers``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListBuildTriggersResponse: + Response containing existing ``BuildTriggers``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/projects/{project_id}/triggers', + }, +{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + }, + ] + request, metadata = self._interceptor.pre_list_build_triggers(request, metadata) + pb_request = cloudbuild.ListBuildTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListBuildTriggersResponse() + pb_resp = cloudbuild.ListBuildTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_build_triggers(resp) + return resp + + class _ListWorkerPools(CloudBuildRestStub): + def __hash__(self): + return hash("ListWorkerPools") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.ListWorkerPoolsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.ListWorkerPoolsResponse: + r"""Call the list worker pools method over HTTP. + + Args: + request (~.cloudbuild.ListWorkerPoolsRequest): + The request object. Request to list ``WorkerPool``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListWorkerPoolsResponse: + Response containing existing ``WorkerPools``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/workerPools', + }, + ] + request, metadata = self._interceptor.pre_list_worker_pools(request, metadata) + pb_request = cloudbuild.ListWorkerPoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListWorkerPoolsResponse() + pb_resp = cloudbuild.ListWorkerPoolsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_worker_pools(resp) + return resp + + class _ReceiveTriggerWebhook(CloudBuildRestStub): + def __hash__(self): + return hash("ReceiveTriggerWebhook") + + def __call__(self, + request: cloudbuild.ReceiveTriggerWebhookRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.ReceiveTriggerWebhookResponse: + r"""Call the receive trigger webhook method over HTTP. 
+ + Args: + request (~.cloudbuild.ReceiveTriggerWebhookRequest): + The request object. ReceiveTriggerWebhookRequest [Experimental] is the + request object accepted by the ReceiveTriggerWebhook + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ReceiveTriggerWebhookResponse: + ReceiveTriggerWebhookResponse [Experimental] is the + response object for the ReceiveTriggerWebhook method. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/triggers/{trigger}:webhook', + 'body': 'body', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}:webhook', + 'body': 'body', + }, + ] + request, metadata = self._interceptor.pre_receive_trigger_webhook(request, metadata) + pb_request = cloudbuild.ReceiveTriggerWebhookRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ReceiveTriggerWebhookResponse() + pb_resp = cloudbuild.ReceiveTriggerWebhookResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_receive_trigger_webhook(resp) + return resp + + class _RetryBuild(CloudBuildRestStub): + def __hash__(self): + return hash("RetryBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.RetryBuildRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the retry build method over HTTP. + + Args: + request (~.cloudbuild.RetryBuildRequest): + The request object. Specifies a build to retry. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/builds/{id}:retry', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/builds/*}:retry', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_retry_build(request, metadata) + pb_request = cloudbuild.RetryBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retry_build(resp) + return resp + + class _RunBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("RunBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.RunBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the run build trigger method over HTTP. + + Args: + request (~.cloudbuild.RunBuildTriggerRequest): + The request object. Specifies a build trigger to run and + the source to use. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}:run', + 'body': 'source', + }, +{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}:run', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_run_build_trigger(request, metadata) + pb_request = cloudbuild.RunBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_build_trigger(resp) + return resp + + class _UpdateBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("UpdateBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.UpdateBuildTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloudbuild.BuildTrigger: + r"""Call the update build trigger method over HTTP. + + Args: + request (~.cloudbuild.UpdateBuildTriggerRequest): + The request object. Request to update an existing ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', + 'body': 'trigger', + }, +{ + 'method': 'patch', + 'uri': '/v1/{trigger.resource_name=projects/*/locations/*/triggers/*}', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_update_build_trigger(request, metadata) + pb_request = cloudbuild.UpdateBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.BuildTrigger() + pb_resp = cloudbuild.BuildTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_build_trigger(resp) + return resp + + class _UpdateWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("UpdateWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloudbuild.UpdateWorkerPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update worker pool method over HTTP. + + Args: + request (~.cloudbuild.UpdateWorkerPoolRequest): + The request object. Request to update a ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}', + 'body': 'worker_pool', + }, + ] + request, metadata = self._interceptor.pre_update_worker_pool(request, metadata) + pb_request = cloudbuild.UpdateWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_worker_pool(resp) + return resp + + @property + def approve_build(self) -> Callable[ + [cloudbuild.ApproveBuildRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ApproveBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_build(self) -> Callable[ + [cloudbuild.CancelBuildRequest], + cloudbuild.Build]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_build(self) -> Callable[ + [cloudbuild.CreateBuildRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_build_trigger(self) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], + cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_worker_pool(self) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_build_trigger(self) -> Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_worker_pool(self) -> Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_build(self) -> Callable[ + [cloudbuild.GetBuildRequest], + cloudbuild.Build]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_build_trigger(self) -> Callable[ + [cloudbuild.GetBuildTriggerRequest], + cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_worker_pool(self) -> Callable[ + [cloudbuild.GetWorkerPoolRequest], + cloudbuild.WorkerPool]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_builds(self) -> Callable[ + [cloudbuild.ListBuildsRequest], + cloudbuild.ListBuildsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBuilds(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_build_triggers(self) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + cloudbuild.ListBuildTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBuildTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_worker_pools(self) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], + cloudbuild.ListWorkerPoolsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkerPools(self._session, self._host, self._interceptor) # type: ignore + + @property + def receive_trigger_webhook(self) -> Callable[ + [cloudbuild.ReceiveTriggerWebhookRequest], + cloudbuild.ReceiveTriggerWebhookResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReceiveTriggerWebhook(self._session, self._host, self._interceptor) # type: ignore + + @property + def retry_build(self) -> Callable[ + [cloudbuild.RetryBuildRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetryBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_build_trigger(self) -> Callable[ + [cloudbuild.RunBuildTriggerRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_build_trigger(self) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_worker_pool(self) -> Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'CloudBuildRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py new file mode 100644 index 00000000..fab30741 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cloudbuild import ( + ApprovalConfig, + ApprovalResult, + ApproveBuildRequest, + ArtifactResult, + Artifacts, + Build, + BuildApproval, + BuildOperationMetadata, + BuildOptions, + BuildStep, + BuildTrigger, + BuiltImage, + CancelBuildRequest, + CreateBuildRequest, + CreateBuildTriggerRequest, + CreateWorkerPoolOperationMetadata, + CreateWorkerPoolRequest, + DeleteBuildTriggerRequest, + DeleteWorkerPoolOperationMetadata, + DeleteWorkerPoolRequest, + FileHashes, + GetBuildRequest, + GetBuildTriggerRequest, + GetWorkerPoolRequest, + GitHubEventsConfig, + GitSource, + Hash, + InlineSecret, + ListBuildsRequest, + ListBuildsResponse, + ListBuildTriggersRequest, + ListBuildTriggersResponse, + ListWorkerPoolsRequest, + ListWorkerPoolsResponse, + PrivatePoolV1Config, + PubsubConfig, + PullRequestFilter, + PushFilter, + ReceiveTriggerWebhookRequest, + ReceiveTriggerWebhookResponse, + RepositoryEventConfig, + RepoSource, + Results, + RetryBuildRequest, + RunBuildTriggerRequest, + Secret, + SecretManagerSecret, + Secrets, + Source, + SourceProvenance, + StorageSource, + StorageSourceManifest, + TimeSpan, + UpdateBuildTriggerRequest, + UpdateWorkerPoolOperationMetadata, + UpdateWorkerPoolRequest, + UploadedMavenArtifact, + UploadedNpmPackage, + UploadedPythonPackage, + Volume, + WebhookConfig, + WorkerPool, +) + +__all__ = ( + 'ApprovalConfig', + 'ApprovalResult', + 'ApproveBuildRequest', + 'ArtifactResult', + 'Artifacts', + 'Build', + 'BuildApproval', + 'BuildOperationMetadata', + 'BuildOptions', + 'BuildStep', + 'BuildTrigger', + 'BuiltImage', + 'CancelBuildRequest', + 'CreateBuildRequest', + 'CreateBuildTriggerRequest', + 'CreateWorkerPoolOperationMetadata', + 'CreateWorkerPoolRequest', + 'DeleteBuildTriggerRequest', + 'DeleteWorkerPoolOperationMetadata', + 'DeleteWorkerPoolRequest', + 'FileHashes', + 'GetBuildRequest', + 'GetBuildTriggerRequest', + 'GetWorkerPoolRequest', + 'GitHubEventsConfig', + 'GitSource', + 'Hash', + 'InlineSecret', + 'ListBuildsRequest', + 
'ListBuildsResponse', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', + 'PrivatePoolV1Config', + 'PubsubConfig', + 'PullRequestFilter', + 'PushFilter', + 'ReceiveTriggerWebhookRequest', + 'ReceiveTriggerWebhookResponse', + 'RepositoryEventConfig', + 'RepoSource', + 'Results', + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'Secret', + 'SecretManagerSecret', + 'Secrets', + 'Source', + 'SourceProvenance', + 'StorageSource', + 'StorageSourceManifest', + 'TimeSpan', + 'UpdateBuildTriggerRequest', + 'UpdateWorkerPoolOperationMetadata', + 'UpdateWorkerPoolRequest', + 'UploadedMavenArtifact', + 'UploadedNpmPackage', + 'UploadedPythonPackage', + 'Volume', + 'WebhookConfig', + 'WorkerPool', +) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py new file mode 100644 index 00000000..838474d2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -0,0 +1,3680 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.devtools.cloudbuild.v1', + manifest={ + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'StorageSource', + 'GitSource', + 'RepoSource', + 'StorageSourceManifest', + 'Source', + 'BuiltImage', + 'UploadedPythonPackage', + 'UploadedMavenArtifact', + 'UploadedNpmPackage', + 'BuildStep', + 'Volume', + 'Results', + 'ArtifactResult', + 'Build', + 'Artifacts', + 'TimeSpan', + 'BuildOperationMetadata', + 'SourceProvenance', + 'FileHashes', + 'Hash', + 'Secrets', + 'InlineSecret', + 'SecretManagerSecret', + 'Secret', + 'CreateBuildRequest', + 'GetBuildRequest', + 'ListBuildsRequest', + 'ListBuildsResponse', + 'CancelBuildRequest', + 'ApproveBuildRequest', + 'BuildApproval', + 'ApprovalConfig', + 'ApprovalResult', + 'BuildTrigger', + 'RepositoryEventConfig', + 'GitHubEventsConfig', + 'PubsubConfig', + 'WebhookConfig', + 'PullRequestFilter', + 'PushFilter', + 'CreateBuildTriggerRequest', + 'GetBuildTriggerRequest', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'DeleteBuildTriggerRequest', + 'UpdateBuildTriggerRequest', + 'BuildOptions', + 'ReceiveTriggerWebhookRequest', + 'ReceiveTriggerWebhookResponse', + 'WorkerPool', + 'PrivatePoolV1Config', + 'CreateWorkerPoolRequest', + 'GetWorkerPoolRequest', + 'DeleteWorkerPoolRequest', + 'UpdateWorkerPoolRequest', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', + 'CreateWorkerPoolOperationMetadata', + 'UpdateWorkerPoolOperationMetadata', + 'DeleteWorkerPoolOperationMetadata', + }, +) + + +class RetryBuildRequest(proto.Message): + r"""Specifies a build to retry. 
+
+    Attributes:
+        name (str):
+            The name of the ``Build`` to retry. Format:
+            ``projects/{project}/locations/{location}/builds/{build}``
+        project_id (str):
+            Required. ID of the project.
+        id (str):
+            Required. Build ID of the original build.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class RunBuildTriggerRequest(proto.Message):
+    r"""Specifies a build trigger to run and the source to use.
+
+    Attributes:
+        name (str):
+            The name of the ``Trigger`` to run. Format:
+            ``projects/{project}/locations/{location}/triggers/{trigger}``
+        project_id (str):
+            Required. ID of the project.
+        trigger_id (str):
+            Required. ID of the trigger.
+        source (google.cloud.devtools.cloudbuild_v1.types.RepoSource):
+            Source to build against this trigger.
+            Branch and tag names cannot consist of regular
+            expressions.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    trigger_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    source: 'RepoSource' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='RepoSource',
+    )
+
+
+class StorageSource(proto.Message):
+    r"""Location of the source in an archive file in Cloud Storage.
+
+    Attributes:
+        bucket (str):
+            Cloud Storage bucket containing the source (see `Bucket Name
+            Requirements <https://cloud.google.com/storage/docs/bucket-naming#requirements>`__).
+        object_ (str):
+            Cloud Storage object containing the source.
+
+            This object must be a zipped (``.zip``) or gzipped archive
+            file (``.tar.gz``) containing source to build.
+        generation (int):
+            Cloud Storage generation for the object. If
+            the generation is omitted, the latest generation
+            will be used.
+ """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + object_: str = proto.Field( + proto.STRING, + number=2, + ) + generation: int = proto.Field( + proto.INT64, + number=3, + ) + + +class GitSource(proto.Message): + r"""Location of the source in any accessible Git repository. + + Attributes: + url (str): + Location of the Git repo to build. + + This will be used as a ``git remote``, see + https://git-scm.com/docs/git-remote. + dir_ (str): + Directory, relative to the source root, in which to run the + build. + + This must be a relative path. If a step's ``dir`` is + specified and is an absolute path, this value is ignored for + that step's execution. + revision (str): + The revision to fetch from the Git repository such as a + branch, a tag, a commit SHA, or any Git ref. + + Cloud Build uses ``git fetch`` to fetch the revision from + the Git repository; therefore make sure that the string you + provide for ``revision`` is parsable by the command. For + information on string values accepted by ``git fetch``, see + https://git-scm.com/docs/gitrevisions#_specifying_revisions. + For information on ``git fetch``, see + https://git-scm.com/docs/git-fetch. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + dir_: str = proto.Field( + proto.STRING, + number=5, + ) + revision: str = proto.Field( + proto.STRING, + number=6, + ) + + +class RepoSource(proto.Message): + r"""Location of the source in a Google Cloud Source Repository. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project_id (str): + ID of the project that owns the Cloud Source + Repository. If omitted, the project ID + requesting the build is assumed. 
+        repo_name (str):
+            Name of the Cloud Source Repository.
+        branch_name (str):
+            Regex matching branches to build.
+            The syntax of the regular expressions accepted
+            is the syntax accepted by RE2 and described at
+            https://github.com/google/re2/wiki/Syntax
+
+            This field is a member of `oneof`_ ``revision``.
+        tag_name (str):
+            Regex matching tags to build.
+            The syntax of the regular expressions accepted
+            is the syntax accepted by RE2 and described at
+            https://github.com/google/re2/wiki/Syntax
+
+            This field is a member of `oneof`_ ``revision``.
+        commit_sha (str):
+            Explicit commit SHA to build.
+
+            This field is a member of `oneof`_ ``revision``.
+        dir_ (str):
+            Directory, relative to the source root, in which to run the
+            build.
+
+            This must be a relative path. If a step's ``dir`` is
+            specified and is an absolute path, this value is ignored for
+            that step's execution.
+        invert_regex (bool):
+            Only trigger a build if the revision
+            does NOT match the revision regex.
+        substitutions (MutableMapping[str, str]):
+            Substitutions to use in a triggered build.
+            Should only be used with RunBuildTrigger
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    repo_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    branch_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+        oneof='revision',
+    )
+    tag_name: str = proto.Field(
+        proto.STRING,
+        number=4,
+        oneof='revision',
+    )
+    commit_sha: str = proto.Field(
+        proto.STRING,
+        number=5,
+        oneof='revision',
+    )
+    dir_: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    invert_regex: bool = proto.Field(
+        proto.BOOL,
+        number=8,
+    )
+    substitutions: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=9,
+    )
+
+
+class StorageSourceManifest(proto.Message):
+    r"""Location of the source manifest in Cloud Storage. This feature is in
+    Preview; see description
+    `here <https://github.com/GoogleCloudPlatform/cloud-builders/tree/master/gcs-fetcher>`__.
+
+    Attributes:
+        bucket (str):
+            Cloud Storage bucket containing the source manifest (see
+            `Bucket Name
+            Requirements <https://cloud.google.com/storage/docs/bucket-naming#requirements>`__).
+        object_ (str):
+            Cloud Storage object containing the source
+            manifest.
+            This object must be a JSON file.
+        generation (int):
+            Cloud Storage generation for the object. If
+            the generation is omitted, the latest generation
+            will be used.
+    """
+
+    bucket: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    object_: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    generation: int = proto.Field(
+        proto.INT64,
+        number=3,
+    )
+
+
+class Source(proto.Message):
+    r"""Location of the source in a supported storage service.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource):
+            If provided, get the source from this
+            location in Cloud Storage.
+
+            This field is a member of `oneof`_ ``source``.
+        repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource):
+            If provided, get the source from this
+            location in a Cloud Source Repository.
+
+            This field is a member of `oneof`_ ``source``.
+        git_source (google.cloud.devtools.cloudbuild_v1.types.GitSource):
+            If provided, get the source from this Git
+            repository.
+
+            This field is a member of `oneof`_ ``source``.
+        storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest):
+            If provided, get the source from this manifest in Cloud
+            Storage. This feature is in Preview; see description
+            `here <https://github.com/GoogleCloudPlatform/cloud-builders/tree/master/gcs-fetcher>`__.
+
+            This field is a member of `oneof`_ ``source``.
+ """ + + storage_source: 'StorageSource' = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message='StorageSource', + ) + repo_source: 'RepoSource' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='RepoSource', + ) + git_source: 'GitSource' = proto.Field( + proto.MESSAGE, + number=5, + oneof='source', + message='GitSource', + ) + storage_source_manifest: 'StorageSourceManifest' = proto.Field( + proto.MESSAGE, + number=8, + oneof='source', + message='StorageSourceManifest', + ) + + +class BuiltImage(proto.Message): + r"""An image built by the pipeline. + + Attributes: + name (str): + Name used to push the container image to Google Container + Registry, as presented to ``docker push``. + digest (str): + Docker Registry 2.0 digest. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified image. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + digest: str = proto.Field( + proto.STRING, + number=3, + ) + push_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=4, + message='TimeSpan', + ) + + +class UploadedPythonPackage(proto.Message): + r"""Artifact uploaded using the PythonPackage directive. + + Attributes: + uri (str): + URI of the uploaded artifact. + file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): + Hash types and values of the Python Artifact. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified artifact. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_hashes: 'FileHashes' = proto.Field( + proto.MESSAGE, + number=2, + message='FileHashes', + ) + push_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=3, + message='TimeSpan', + ) + + +class UploadedMavenArtifact(proto.Message): + r"""A Maven artifact uploaded using the MavenArtifact directive. 
+ + Attributes: + uri (str): + URI of the uploaded artifact. + file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): + Hash types and values of the Maven Artifact. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified artifact. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_hashes: 'FileHashes' = proto.Field( + proto.MESSAGE, + number=2, + message='FileHashes', + ) + push_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=3, + message='TimeSpan', + ) + + +class UploadedNpmPackage(proto.Message): + r"""An npm package uploaded to Artifact Registry using the + NpmPackage directive. + + Attributes: + uri (str): + URI of the uploaded npm package. + file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): + Hash types and values of the npm package. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified artifact. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_hashes: 'FileHashes' = proto.Field( + proto.MESSAGE, + number=2, + message='FileHashes', + ) + push_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=3, + message='TimeSpan', + ) + + +class BuildStep(proto.Message): + r"""A step in the build pipeline. + + Attributes: + name (str): + Required. The name of the container image that will run this + particular build step. + + If the image is available in the host's Docker daemon's + cache, it will be run directly. If not, the host will + attempt to pull the image first, using the builder service + account's credentials if necessary. + + The Docker daemon's cache will already have the latest + versions of all of the officially supported build steps + (https://github.com/GoogleCloudPlatform/cloud-builders). 
The + Docker daemon will also have cached many of the layers for + some popular images, like "ubuntu", "debian", but they will + be refreshed at the time you attempt to use them. + + If you built an image in a previous build step, it will be + stored in the host's Docker daemon's cache and is available + to use as the name for a later build step. + env (MutableSequence[str]): + A list of environment variable definitions to + be used when running a step. + The elements are of the form "KEY=VALUE" for the + environment variable "KEY" being given the value + "VALUE". + args (MutableSequence[str]): + A list of arguments that will be presented to the step when + it is started. + + If the image used to run the step's container has an + entrypoint, the ``args`` are used as arguments to that + entrypoint. If the image does not define an entrypoint, the + first element in args is used as the entrypoint, and the + remainder will be used as arguments. + dir_ (str): + Working directory to use when running this step's container. + + If this value is a relative path, it is relative to the + build's working directory. If this value is absolute, it may + be outside the build's working directory, in which case the + contents of the path may not be persisted across build step + executions, unless a ``volume`` for that path is specified. + + If the build specifies a ``RepoSource`` with ``dir`` and a + step with a ``dir``, which specifies an absolute path, the + ``RepoSource`` ``dir`` is ignored for the step's execution. + id (str): + Unique identifier for this build step, used in ``wait_for`` + to reference this build step as a dependency. + wait_for (MutableSequence[str]): + The ID(s) of the step(s) that this build step depends on. + This build step will not start until all the build steps in + ``wait_for`` have completed successfully. If ``wait_for`` is + empty, this build step will start when all previous build + steps in the ``Build.Steps`` list have completed + successfully. 
+ entrypoint (str): + Entrypoint to be used instead of the build + step image's default entrypoint. If unset, the + image's default entrypoint is used. + secret_env (MutableSequence[str]): + A list of environment variables which are encrypted using a + Cloud Key Management Service crypto key. These values must + be specified in the build's ``Secret``. + volumes (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Volume]): + List of volumes to mount into the build step. + Each volume is created as an empty volume prior + to execution of the build step. Upon completion + of the build, volumes and their contents are + discarded. + + Using a named volume in only one step is not + valid as it is indicative of a build request + with an incorrect configuration. + timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + executing this build step. + pull_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pulling this build step's builder image only. + timeout (google.protobuf.duration_pb2.Duration): + Time limit for executing this build step. If + not defined, the step has no time limit and will + be allowed to continue to run until either it + completes or the build itself times out. + status (google.cloud.devtools.cloudbuild_v1.types.Build.Status): + Output only. Status of the build step. At + this time, build step status is only updated on + build completion; step status is not updated in + real-time as the build progresses. + allow_failure (bool): + Allow this build step to fail without failing the entire + build. + + If false, the entire build will fail if this step fails. + Otherwise, the build will succeed, but this step will still + have a failure status. Error information will be reported in + the failure_detail field. + exit_code (int): + Output only. Return code from running the + step. 
+ allow_exit_codes (MutableSequence[int]): + Allow this build step to fail without failing the entire + build if and only if the exit code is one of the specified + codes. If allow_failure is also specified, this field will + take precedence. + script (str): + A shell script to be executed in the step. + When script is provided, the user cannot specify + the entrypoint or args. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + env: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + dir_: str = proto.Field( + proto.STRING, + number=4, + ) + id: str = proto.Field( + proto.STRING, + number=5, + ) + wait_for: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + entrypoint: str = proto.Field( + proto.STRING, + number=7, + ) + secret_env: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + volumes: MutableSequence['Volume'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='Volume', + ) + timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=10, + message='TimeSpan', + ) + pull_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=13, + message='TimeSpan', + ) + timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=11, + message=duration_pb2.Duration, + ) + status: 'Build.Status' = proto.Field( + proto.ENUM, + number=12, + enum='Build.Status', + ) + allow_failure: bool = proto.Field( + proto.BOOL, + number=14, + ) + exit_code: int = proto.Field( + proto.INT32, + number=16, + ) + allow_exit_codes: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=18, + ) + script: str = proto.Field( + proto.STRING, + number=19, + ) + + +class Volume(proto.Message): + r"""Volume describes a Docker container volume which is mounted + into build steps in order to persist files across build step + execution. 
+ + Attributes: + name (str): + Name of the volume to mount. + Volume names must be unique per build step and + must be valid names for Docker volumes. Each + named volume must be used by at least two build + steps. + path (str): + Path at which to mount the volume. + Paths must be absolute and cannot conflict with + other volume paths on the same build step or + with certain reserved volume paths. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Results(proto.Message): + r"""Artifacts created by the build pipeline. + + Attributes: + images (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuiltImage]): + Container images that were built as a part of + the build. + build_step_images (MutableSequence[str]): + List of build step digests, in the order + corresponding to build step indices. + artifact_manifest (str): + Path to the artifact manifest for + non-container artifacts uploaded to Cloud + Storage. Only populated when artifacts are + uploaded to Cloud Storage. + num_artifacts (int): + Number of non-container artifacts uploaded to + Cloud Storage. Only populated when artifacts are + uploaded to Cloud Storage. + build_step_outputs (MutableSequence[bytes]): + List of build step outputs, produced by builder images, in + the order corresponding to build step indices. + + `Cloud + Builders `__ + can produce this output by writing to + ``$BUILDER_OUTPUT/output``. Only the first 4KB of data is + stored. + artifact_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Time to push all non-container artifacts to + Cloud Storage. + python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedPythonPackage]): + Python artifacts uploaded to Artifact + Registry at the end of the build. 
+ maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedMavenArtifact]): + Maven artifacts uploaded to Artifact Registry + at the end of the build. + npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedNpmPackage]): + Npm packages uploaded to Artifact Registry at + the end of the build. + """ + + images: MutableSequence['BuiltImage'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='BuiltImage', + ) + build_step_images: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + artifact_manifest: str = proto.Field( + proto.STRING, + number=4, + ) + num_artifacts: int = proto.Field( + proto.INT64, + number=5, + ) + build_step_outputs: MutableSequence[bytes] = proto.RepeatedField( + proto.BYTES, + number=6, + ) + artifact_timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=7, + message='TimeSpan', + ) + python_packages: MutableSequence['UploadedPythonPackage'] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message='UploadedPythonPackage', + ) + maven_artifacts: MutableSequence['UploadedMavenArtifact'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='UploadedMavenArtifact', + ) + npm_packages: MutableSequence['UploadedNpmPackage'] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message='UploadedNpmPackage', + ) + + +class ArtifactResult(proto.Message): + r"""An artifact that was uploaded during a build. This + is a single record in the artifact manifest JSON file. + + Attributes: + location (str): + The path of an artifact in a Cloud Storage bucket, with the + generation number. For example, + ``gs://mybucket/path/to/output.jar#generation``. + file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.FileHashes]): + The file hash of the artifact. 
+ """ + + location: str = proto.Field( + proto.STRING, + number=1, + ) + file_hash: MutableSequence['FileHashes'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FileHashes', + ) + + +class Build(proto.Message): + r"""A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find source code, + how to build it (for example, the builder image to run on the + source), and where to store the built artifacts. + + Fields can include the following variables, which will be expanded + when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $LOCATION: the location/region of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified by RepoSource. + - $BRANCH_NAME: the branch name specified by RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. + - $REVISION_ID or $COMMIT_SHA: the commit SHA specified by + RepoSource or resolved from the specified branch or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. + + Attributes: + name (str): + Output only. The 'Build' name with format: + ``projects/{project}/locations/{location}/builds/{build}``, + where {build} is a unique identifier generated by the + service. + id (str): + Output only. Unique identifier of the build. + project_id (str): + Output only. ID of the project. + status (google.cloud.devtools.cloudbuild_v1.types.Build.Status): + Output only. Status of the build. + status_detail (str): + Output only. Customer-readable message about + the current status. + source (google.cloud.devtools.cloudbuild_v1.types.Source): + The location of the source files to build. + steps (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildStep]): + Required. The operations to be performed on + the workspace. + results (google.cloud.devtools.cloudbuild_v1.types.Results): + Output only. Results of the build. 
+ create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the request to + create the build was received. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which execution of the + build was started. + finish_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which execution of the build was + finished. + + The difference between finish_time and start_time is the + duration of the build's execution. + timeout (google.protobuf.duration_pb2.Duration): + Amount of time that this build should be allowed to run, to + second granularity. If this amount of time elapses, work on + the build will cease and the build status will be + ``TIMEOUT``. + + ``timeout`` starts ticking from ``startTime``. + + Default time is 60 minutes. + images (MutableSequence[str]): + A list of images to be pushed upon the successful completion + of all build steps. + + The images are pushed using the builder service account's + credentials. + + The digests of the pushed images will be stored in the + ``Build`` resource's results field. + + If any of the images fail to be pushed, the build status is + marked ``FAILURE``. + queue_ttl (google.protobuf.duration_pb2.Duration): + TTL in queue for this build. If provided and the build is + enqueued longer than this value, the build will expire and + the build status will be ``EXPIRED``. + + The TTL starts ticking from create_time. + artifacts (google.cloud.devtools.cloudbuild_v1.types.Artifacts): + Artifacts produced by the build that should + be uploaded upon successful completion of all + build steps. + logs_bucket (str): + Cloud Storage bucket where logs should be written (see + `Bucket Name + Requirements `__). + Logs file names will be of the format + ``${logs_bucket}/log-${build_id}.txt``. + source_provenance (google.cloud.devtools.cloudbuild_v1.types.SourceProvenance): + Output only. A permanent fixed identifier for + source. + build_trigger_id (str): + Output only. 
The ID of the ``BuildTrigger`` that triggered + this build, if it was triggered automatically. + options (google.cloud.devtools.cloudbuild_v1.types.BuildOptions): + Special options for this build. + log_url (str): + Output only. URL to logs for this build in + Google Cloud Console. + substitutions (MutableMapping[str, str]): + Substitutions data for ``Build`` resource. + tags (MutableSequence[str]): + Tags for annotation of a ``Build``. These are not docker + tags. + secrets (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Secret]): + Secrets to decrypt using Cloud Key Management Service. Note: + Secret Manager is the recommended technique for managing + sensitive data with Cloud Build. Use ``available_secrets`` + to configure builds to access secrets from Secret Manager. + For instructions, see: + https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets + timing (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.TimeSpan]): + Output only. Stores timing information for phases of the + build. Valid keys are: + + - BUILD: time to execute all build steps. + - PUSH: time to push all artifacts including docker images + and non docker artifacts. + - FETCHSOURCE: time to fetch source. + - SETUPBUILD: time to set up build. + + If the build does not specify source or images, these keys + will not be included. + approval (google.cloud.devtools.cloudbuild_v1.types.BuildApproval): + Output only. Describes this build's approval + configuration, status, and result. + service_account (str): + IAM service account whose credentials will be used at build + runtime. Must be of the format + ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}``. ACCOUNT + can be email address or uniqueId of the service account. + available_secrets (google.cloud.devtools.cloudbuild_v1.types.Secrets): + Secrets and secret environment variables. + warnings (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Build.Warning]): + Output only. 
Non-fatal problems encountered + during the execution of the build. + failure_info (google.cloud.devtools.cloudbuild_v1.types.Build.FailureInfo): + Output only. Contains information about the + build when status=FAILURE. + """ + class Status(proto.Enum): + r"""Possible status of a build or build step. + + Values: + STATUS_UNKNOWN (0): + Status of the build is unknown. + PENDING (10): + Build has been created and is pending + execution and queuing. It has not been queued. + QUEUED (1): + Build or step is queued; work has not yet + begun. + WORKING (2): + Build or step is being executed. + SUCCESS (3): + Build or step finished successfully. + FAILURE (4): + Build or step failed to complete + successfully. + INTERNAL_ERROR (5): + Build or step failed due to an internal + cause. + TIMEOUT (6): + Build or step took longer than was allowed. + CANCELLED (7): + Build or step was canceled by a user. + EXPIRED (9): + Build was enqueued for longer than the value of + ``queue_ttl``. + """ + STATUS_UNKNOWN = 0 + PENDING = 10 + QUEUED = 1 + WORKING = 2 + SUCCESS = 3 + FAILURE = 4 + INTERNAL_ERROR = 5 + TIMEOUT = 6 + CANCELLED = 7 + EXPIRED = 9 + + class Warning(proto.Message): + r"""A non-fatal problem encountered during the execution of the + build. + + Attributes: + text (str): + Explanation of the warning generated. + priority (google.cloud.devtools.cloudbuild_v1.types.Build.Warning.Priority): + The priority for this warning. + """ + class Priority(proto.Enum): + r"""The relative importance of this warning. + + Values: + PRIORITY_UNSPECIFIED (0): + Should not be used. + INFO (1): + e.g. deprecation warnings and alternative + feature highlights. + WARNING (2): + e.g. automated detection of possible issues + with the build. + ALERT (3): + e.g. 
alerts that a feature used in the build + is pending removal + """ + PRIORITY_UNSPECIFIED = 0 + INFO = 1 + WARNING = 2 + ALERT = 3 + + text: str = proto.Field( + proto.STRING, + number=1, + ) + priority: 'Build.Warning.Priority' = proto.Field( + proto.ENUM, + number=2, + enum='Build.Warning.Priority', + ) + + class FailureInfo(proto.Message): + r"""A fatal problem encountered during the execution of the + build. + + Attributes: + type_ (google.cloud.devtools.cloudbuild_v1.types.Build.FailureInfo.FailureType): + The name of the failure. + detail (str): + Explains the failure issue in more detail + using hard-coded text. + """ + class FailureType(proto.Enum): + r"""The name of a fatal problem encountered during the execution + of the build. + + Values: + FAILURE_TYPE_UNSPECIFIED (0): + Type unspecified + PUSH_FAILED (1): + Unable to push the image to the repository. + PUSH_IMAGE_NOT_FOUND (2): + Final image not found. + PUSH_NOT_AUTHORIZED (3): + Unauthorized push of the final image. + LOGGING_FAILURE (4): + Backend logging failures. Should retry. + USER_BUILD_STEP (5): + A build step has failed. + FETCH_SOURCE_FAILED (6): + The source fetching has failed. 
+ """ + FAILURE_TYPE_UNSPECIFIED = 0 + PUSH_FAILED = 1 + PUSH_IMAGE_NOT_FOUND = 2 + PUSH_NOT_AUTHORIZED = 3 + LOGGING_FAILURE = 4 + USER_BUILD_STEP = 5 + FETCH_SOURCE_FAILED = 6 + + type_: 'Build.FailureInfo.FailureType' = proto.Field( + proto.ENUM, + number=1, + enum='Build.FailureInfo.FailureType', + ) + detail: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=45, + ) + id: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=16, + ) + status: Status = proto.Field( + proto.ENUM, + number=2, + enum=Status, + ) + status_detail: str = proto.Field( + proto.STRING, + number=24, + ) + source: 'Source' = proto.Field( + proto.MESSAGE, + number=3, + message='Source', + ) + steps: MutableSequence['BuildStep'] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message='BuildStep', + ) + results: 'Results' = proto.Field( + proto.MESSAGE, + number=10, + message='Results', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + finish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=12, + message=duration_pb2.Duration, + ) + images: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + queue_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=40, + message=duration_pb2.Duration, + ) + artifacts: 'Artifacts' = proto.Field( + proto.MESSAGE, + number=37, + message='Artifacts', + ) + logs_bucket: str = proto.Field( + proto.STRING, + number=19, + ) + source_provenance: 'SourceProvenance' = proto.Field( + proto.MESSAGE, + number=21, + message='SourceProvenance', + ) + build_trigger_id: str = proto.Field( 
+ proto.STRING, + number=22, + ) + options: 'BuildOptions' = proto.Field( + proto.MESSAGE, + number=23, + message='BuildOptions', + ) + log_url: str = proto.Field( + proto.STRING, + number=25, + ) + substitutions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=29, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=31, + ) + secrets: MutableSequence['Secret'] = proto.RepeatedField( + proto.MESSAGE, + number=32, + message='Secret', + ) + timing: MutableMapping[str, 'TimeSpan'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=33, + message='TimeSpan', + ) + approval: 'BuildApproval' = proto.Field( + proto.MESSAGE, + number=44, + message='BuildApproval', + ) + service_account: str = proto.Field( + proto.STRING, + number=42, + ) + available_secrets: 'Secrets' = proto.Field( + proto.MESSAGE, + number=47, + message='Secrets', + ) + warnings: MutableSequence[Warning] = proto.RepeatedField( + proto.MESSAGE, + number=49, + message=Warning, + ) + failure_info: FailureInfo = proto.Field( + proto.MESSAGE, + number=51, + message=FailureInfo, + ) + + +class Artifacts(proto.Message): + r"""Artifacts produced by a build that should be uploaded upon + successful completion of all build steps. + + Attributes: + images (MutableSequence[str]): + A list of images to be pushed upon the + successful completion of all build steps. + + The images will be pushed using the builder + service account's credentials. + The digests of the pushed images will be stored + in the Build resource's results field. + + If any of the images fail to be pushed, the + build is marked FAILURE. + objects (google.cloud.devtools.cloudbuild_v1.types.Artifacts.ArtifactObjects): + A list of objects to be uploaded to Cloud + Storage upon successful completion of all build + steps. 
+ Files in the workspace matching specified paths + globs will be uploaded to the specified Cloud + Storage location using the builder service + account's credentials. + + The location and generation of the uploaded + objects will be stored in the Build resource's + results field. + + If any objects fail to be pushed, the build is + marked FAILURE. + maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.MavenArtifact]): + A list of Maven artifacts to be uploaded to + Artifact Registry upon successful completion of + all build steps. + Artifacts in the workspace matching specified + paths globs will be uploaded to the specified + Artifact Registry repository using the builder + service account's credentials. + + If any artifacts fail to be pushed, the build is + marked FAILURE. + python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.PythonPackage]): + A list of Python packages to be uploaded to + Artifact Registry upon successful completion of + all build steps. + The build service account credentials will be + used to perform the upload. + If any objects fail to be pushed, the build is + marked FAILURE. + npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.NpmPackage]): + A list of npm packages to be uploaded to + Artifact Registry upon successful completion of + all build steps. + Npm packages in the specified paths will be + uploaded to the specified Artifact Registry + repository using the builder service account's + credentials. + + If any packages fail to be pushed, the build is + marked FAILURE. + """ + + class ArtifactObjects(proto.Message): + r"""Files in the workspace to upload to Cloud Storage upon + successful completion of all build steps. + + Attributes: + location (str): + Cloud Storage bucket and optional object path, in the form + "gs://bucket/path/to/somewhere/". (see `Bucket Name + Requirements `__). 
+ + Files in the workspace matching any path pattern will be + uploaded to Cloud Storage with this location as a prefix. + paths (MutableSequence[str]): + Path globs used to match files in the build's + workspace. + timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing all artifact objects. + """ + + location: str = proto.Field( + proto.STRING, + number=1, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + timing: 'TimeSpan' = proto.Field( + proto.MESSAGE, + number=3, + message='TimeSpan', + ) + + class MavenArtifact(proto.Message): + r"""A Maven artifact to upload to Artifact Registry upon + successful completion of all build steps. + + Attributes: + repository (str): + Artifact Registry repository, in the form + "https://$REGION-maven.pkg.dev/$PROJECT/$REPOSITORY" + Artifact in the workspace specified by path will + be uploaded to Artifact Registry with this + location as a prefix. + path (str): + Path to an artifact in the build's workspace + to be uploaded to Artifact Registry. + This can be either an absolute path, + e.g. + /workspace/my-app/target/my-app-1.0.SNAPSHOT.jar + or a relative path from /workspace, + e.g. my-app/target/my-app-1.0.SNAPSHOT.jar. + artifact_id (str): + Maven ``artifactId`` value used when uploading the artifact + to Artifact Registry. + group_id (str): + Maven ``groupId`` value used when uploading the artifact to + Artifact Registry. + version (str): + Maven ``version`` value used when uploading the artifact to + Artifact Registry. 
+ """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + artifact_id: str = proto.Field( + proto.STRING, + number=3, + ) + group_id: str = proto.Field( + proto.STRING, + number=4, + ) + version: str = proto.Field( + proto.STRING, + number=5, + ) + + class PythonPackage(proto.Message): + r"""Python package to upload to Artifact Registry upon successful + completion of all build steps. A package can encapsulate + multiple objects to be uploaded to a single repository. + + Attributes: + repository (str): + Artifact Registry repository, in the form + "https://$REGION-python.pkg.dev/$PROJECT/$REPOSITORY" + Files in the workspace matching any path pattern + will be uploaded to Artifact Registry with this + location as a prefix. + paths (MutableSequence[str]): + Path globs used to match files in the build's workspace. For + Python/ Twine, this is usually ``dist/*``, and sometimes + additionally an ``.asc`` file. + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class NpmPackage(proto.Message): + r"""Npm package to upload to Artifact Registry upon successful + completion of all build steps. + + Attributes: + repository (str): + Artifact Registry repository, in the form + "https://$REGION-npm.pkg.dev/$PROJECT/$REPOSITORY" + Npm package in the workspace specified by path + will be zipped and uploaded to Artifact Registry + with this location as a prefix. + package_path (str): + Path to the package.json. + e.g. 
workspace/path/to/package + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + package_path: str = proto.Field( + proto.STRING, + number=2, + ) + + images: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + objects: ArtifactObjects = proto.Field( + proto.MESSAGE, + number=2, + message=ArtifactObjects, + ) + maven_artifacts: MutableSequence[MavenArtifact] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=MavenArtifact, + ) + python_packages: MutableSequence[PythonPackage] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=PythonPackage, + ) + npm_packages: MutableSequence[NpmPackage] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=NpmPackage, + ) + + +class TimeSpan(proto.Message): + r"""Start and end times for a build execution phase. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start of time span. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End of time span. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BuildOperationMetadata(proto.Message): + r"""Metadata for build operations. + + Attributes: + build (google.cloud.devtools.cloudbuild_v1.types.Build): + The build that the operation is tracking. + """ + + build: 'Build' = proto.Field( + proto.MESSAGE, + number=1, + message='Build', + ) + + +class SourceProvenance(proto.Message): + r"""Provenance of the source. Ways to find the original source, + or verify that some source was used for this build. + + Attributes: + resolved_storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource): + A copy of the build's ``source.storage_source``, if exists, + with any generations resolved. 
+ resolved_repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): + A copy of the build's ``source.repo_source``, if exists, + with any revisions resolved. + resolved_storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest): + A copy of the build's ``source.storage_source_manifest``, if + exists, with any revisions resolved. This feature is in + Preview. + file_hashes (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.FileHashes]): + Output only. Hash(es) of the build source, which can be used + to verify that the original source integrity was maintained + in the build. Note that ``FileHashes`` will only be + populated if ``BuildOptions`` has requested a + ``SourceProvenanceHash``. + + The keys to this map are file paths used as build source and + the values contain the hash values for those files. + + If the build source came in a single package such as a + gzipped tarfile (``.tar.gz``), the ``FileHash`` will be for + the single path to that file. + """ + + resolved_storage_source: 'StorageSource' = proto.Field( + proto.MESSAGE, + number=3, + message='StorageSource', + ) + resolved_repo_source: 'RepoSource' = proto.Field( + proto.MESSAGE, + number=6, + message='RepoSource', + ) + resolved_storage_source_manifest: 'StorageSourceManifest' = proto.Field( + proto.MESSAGE, + number=9, + message='StorageSourceManifest', + ) + file_hashes: MutableMapping[str, 'FileHashes'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message='FileHashes', + ) + + +class FileHashes(proto.Message): + r"""Container message for hashes of byte content of files, used + in SourceProvenance messages to verify integrity of source input + to the build. + + Attributes: + file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Hash]): + Collection of file hashes. 
+ """ + + file_hash: MutableSequence['Hash'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Hash', + ) + + +class Hash(proto.Message): + r"""Container message for hash values. + + Attributes: + type_ (google.cloud.devtools.cloudbuild_v1.types.Hash.HashType): + The type of hash that was performed. + value (bytes): + The hash value. + """ + class HashType(proto.Enum): + r"""Specifies the hash algorithm, if any. + + Values: + NONE (0): + No hash requested. + SHA256 (1): + Use a sha256 hash. + MD5 (2): + Use a md5 hash. + SHA512 (4): + Use a sha512 hash. + """ + NONE = 0 + SHA256 = 1 + MD5 = 2 + SHA512 = 4 + + type_: HashType = proto.Field( + proto.ENUM, + number=1, + enum=HashType, + ) + value: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class Secrets(proto.Message): + r"""Secrets and secret environment variables. + + Attributes: + secret_manager (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.SecretManagerSecret]): + Secrets in Secret Manager and associated + secret environment variable. + inline (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.InlineSecret]): + Secrets encrypted with KMS key and the + associated secret environment variable. + """ + + secret_manager: MutableSequence['SecretManagerSecret'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SecretManagerSecret', + ) + inline: MutableSequence['InlineSecret'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='InlineSecret', + ) + + +class InlineSecret(proto.Message): + r"""Pairs a set of secret environment variables mapped to + encrypted values with the Cloud KMS key to use to decrypt the + value. + + Attributes: + kms_key_name (str): + Resource name of Cloud KMS crypto key to decrypt the + encrypted value. In format: + projects/\ */locations/*/keyRings/*/cryptoKeys/* + env_map (MutableMapping[str, bytes]): + Map of environment variable name to its + encrypted value. 
+ Secret environment variables must be unique + across all of a build's secrets, and must be + used by at least one build step. Values can be + at most 64 KB in size. There can be at most 100 + secret values across all of a build's secrets. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + env_map: MutableMapping[str, bytes] = proto.MapField( + proto.STRING, + proto.BYTES, + number=2, + ) + + +class SecretManagerSecret(proto.Message): + r"""Pairs a secret environment variable with a SecretVersion in + Secret Manager. + + Attributes: + version_name (str): + Resource name of the SecretVersion. In format: + projects/\ */secrets/*/versions/\* + env (str): + Environment variable name to associate with + the secret. Secret environment variables must be + unique across all of a build's secrets, and must + be used by at least one build step. + """ + + version_name: str = proto.Field( + proto.STRING, + number=1, + ) + env: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Secret(proto.Message): + r"""Pairs a set of secret environment variables containing encrypted + values with the Cloud KMS key to use to decrypt the value. Note: Use + ``kmsKeyName`` with ``available_secrets`` instead of using + ``kmsKeyName`` with ``secret``. For instructions see: + https://cloud.google.com/cloud-build/docs/securing-builds/use-encrypted-credentials. + + Attributes: + kms_key_name (str): + Cloud KMS key name to use to decrypt these + envs. + secret_env (MutableMapping[str, bytes]): + Map of environment variable name to its + encrypted value. + Secret environment variables must be unique + across all of a build's secrets, and must be + used by at least one build step. Values can be + at most 64 KB in size. There can be at most 100 + secret values across all of a build's secrets. 
+ """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + secret_env: MutableMapping[str, bytes] = proto.MapField( + proto.STRING, + proto.BYTES, + number=3, + ) + + +class CreateBuildRequest(proto.Message): + r"""Request to create a new build. + + Attributes: + parent (str): + The parent resource where this build will be created. + Format: ``projects/{project}/locations/{location}`` + project_id (str): + Required. ID of the project. + build (google.cloud.devtools.cloudbuild_v1.types.Build): + Required. Build resource to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + build: 'Build' = proto.Field( + proto.MESSAGE, + number=2, + message='Build', + ) + + +class GetBuildRequest(proto.Message): + r"""Request to get a build. + + Attributes: + name (str): + The name of the ``Build`` to retrieve. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + project_id (str): + Required. ID of the project. + id (str): + Required. ID of the build. + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListBuildsRequest(proto.Message): + r"""Request to list builds. + + Attributes: + parent (str): + The parent of the collection of ``Builds``. Format: + ``projects/{project}/locations/{location}`` + project_id (str): + Required. ID of the project. + page_size (int): + Number of results to return in the list. + page_token (str): + The page token for the next page of Builds. + + If unspecified, the first page of results is returned. + + If the token is rejected for any reason, INVALID_ARGUMENT + will be thrown. In this case, the token should be discarded, + and pagination should be restarted from the first page of + results. + + See https://google.aip.dev/158 for more. 
+ filter (str): + The raw filter text to constrain the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=9, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=8, + ) + + +class ListBuildsResponse(proto.Message): + r"""Response including listed builds. + + Attributes: + builds (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Build]): + Builds will be sorted by ``create_time``, descending. + next_page_token (str): + Token to receive the next page of results. + This will be absent if the end of the response + list has been reached. + """ + + @property + def raw_page(self): + return self + + builds: MutableSequence['Build'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Build', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelBuildRequest(proto.Message): + r"""Request to cancel an ongoing build. + + Attributes: + name (str): + The name of the ``Build`` to cancel. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + project_id (str): + Required. ID of the project. + id (str): + Required. ID of the build. + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ApproveBuildRequest(proto.Message): + r"""Request to approve or reject a pending build. + + Attributes: + name (str): + Required. Name of the target build. For example: + "projects/{$project_id}/builds/{$build_id}". + approval_result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): + Approval decision and metadata. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + approval_result: 'ApprovalResult' = proto.Field( + proto.MESSAGE, + number=2, + message='ApprovalResult', + ) + + +class BuildApproval(proto.Message): + r"""BuildApproval describes a build's approval configuration, + state, and result. + + Attributes: + state (google.cloud.devtools.cloudbuild_v1.types.BuildApproval.State): + Output only. The state of this build's + approval. + config (google.cloud.devtools.cloudbuild_v1.types.ApprovalConfig): + Output only. Configuration for manual + approval of this build. + result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): + Output only. Result of manual approval for + this Build. + """ + class State(proto.Enum): + r"""Specifies the current state of a build's approval. + + Values: + STATE_UNSPECIFIED (0): + Default enum type. This should not be used. + PENDING (1): + Build approval is pending. + APPROVED (2): + Build approval has been approved. + REJECTED (3): + Build approval has been rejected. + CANCELLED (5): + Build was cancelled while it was still + pending approval. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + APPROVED = 2 + REJECTED = 3 + CANCELLED = 5 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + config: 'ApprovalConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='ApprovalConfig', + ) + result: 'ApprovalResult' = proto.Field( + proto.MESSAGE, + number=3, + message='ApprovalResult', + ) + + +class ApprovalConfig(proto.Message): + r"""ApprovalConfig describes configuration for manual approval of + a build. + + Attributes: + approval_required (bool): + Whether or not approval is needed. If this is + set on a build, it will become pending when + created, and will need to be explicitly approved + to start. 
+ """ + + approval_required: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ApprovalResult(proto.Message): + r"""ApprovalResult describes the decision and associated metadata + of a manual approval of a build. + + Attributes: + approver_account (str): + Output only. Email of the user that called + the ApproveBuild API to approve or reject a + build at the time that the API was called. + approval_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the approval + decision was made. + decision (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult.Decision): + Required. The decision of this manual + approval. + comment (str): + Optional. An optional comment for this manual + approval result. + url (str): + Optional. An optional URL tied to this manual + approval result. This field is essentially the + same as comment, except that it will be rendered + by the UI differently. An example use case is a + link to an external job that approved this + Build. + """ + class Decision(proto.Enum): + r"""Specifies whether or not this manual approval result is to + approve or reject a build. + + Values: + DECISION_UNSPECIFIED (0): + Default enum type. This should not be used. + APPROVED (1): + Build is approved. + REJECTED (2): + Build is rejected. + """ + DECISION_UNSPECIFIED = 0 + APPROVED = 1 + REJECTED = 2 + + approver_account: str = proto.Field( + proto.STRING, + number=2, + ) + approval_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + decision: Decision = proto.Field( + proto.ENUM, + number=4, + enum=Decision, + ) + comment: str = proto.Field( + proto.STRING, + number=5, + ) + url: str = proto.Field( + proto.STRING, + number=6, + ) + + +class BuildTrigger(proto.Message): + r"""Configuration for an automated build in response to source + repository changes. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + resource_name (str): + The ``Trigger`` name with format: + ``projects/{project}/locations/{location}/triggers/{trigger}``, + where {trigger} is a unique identifier generated by the + service. + id (str): + Output only. Unique identifier of the + trigger. + description (str): + Human-readable description of this trigger. + name (str): + User-assigned name of the trigger. Must be + unique within the project. Trigger names must + meet the following requirements: + + They must contain only alphanumeric characters + and dashes. + They can be 1-64 characters long. + + They must begin and end with an alphanumeric + character. + tags (MutableSequence[str]): + Tags for annotation of a ``BuildTrigger`` + trigger_template (google.cloud.devtools.cloudbuild_v1.types.RepoSource): + Template describing the types of source changes to trigger a + build. + + Branch and tag names in trigger templates are interpreted as + regular expressions. Any branch or tag change that matches + that regular expression will trigger a build. + + Mutually exclusive with ``github``. + github (google.cloud.devtools.cloudbuild_v1.types.GitHubEventsConfig): + GitHubEventsConfig describes the configuration of a trigger + that creates a build whenever a GitHub event is received. + + Mutually exclusive with ``trigger_template``. + pubsub_config (google.cloud.devtools.cloudbuild_v1.types.PubsubConfig): + PubsubConfig describes the configuration of a + trigger that creates a build whenever a Pub/Sub + message is published. + webhook_config (google.cloud.devtools.cloudbuild_v1.types.WebhookConfig): + WebhookConfig describes the configuration of + a trigger that creates a build whenever a + webhook is sent to a trigger's webhook URL. 
+ autodetect (bool): + Autodetect build configuration. The + following precedence is used (case insensitive): + 1. cloudbuild.yaml + 2. cloudbuild.yml + 3. cloudbuild.json + 4. Dockerfile + + Currently only available for GitHub App + Triggers. + + This field is a member of `oneof`_ ``build_template``. + build (google.cloud.devtools.cloudbuild_v1.types.Build): + Contents of the build template. + + This field is a member of `oneof`_ ``build_template``. + filename (str): + Path, from the source root, to the build + configuration file (i.e. cloudbuild.yaml). + + This field is a member of `oneof`_ ``build_template``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the trigger was + created. + disabled (bool): + If true, the trigger will never automatically + execute a build. + substitutions (MutableMapping[str, str]): + Substitutions for Build resource. The keys must match the + following regular expression: ``^_[A-Z0-9_]+$``. + ignored_files (MutableSequence[str]): + ignored_files and included_files are file glob matches using + https://golang.org/pkg/path/filepath/#Match extended with + support for "**". + + If ignored_files and changed files are both empty, then they + are not used to determine whether or not to trigger a build. + + If ignored_files is not empty, then we ignore any files that + match any of the ignored_file globs. If the change has no + files that are outside of the ignored_files globs, then we + do not trigger a build. + included_files (MutableSequence[str]): + If any of the files altered in the commit pass the + ignored_files filter and included_files is empty, then as + far as this filter is concerned, we should trigger the + build. + + If any of the files altered in the commit pass the + ignored_files filter and included_files is not empty, then + we make sure that at least one of those files matches a + included_files glob. If not, then we do not trigger a build. + filter (str): + Optional. 
A Common Expression Language + string. + service_account (str): + The service account used for all user-controlled operations + including UpdateBuildTrigger, RunBuildTrigger, CreateBuild, + and CancelBuild. If no service account is set, then the + standard Cloud Build service account + ([PROJECT_NUM]@system.gserviceaccount.com) will be used + instead. Format: + ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}`` + repository_event_config (google.cloud.devtools.cloudbuild_v1.types.RepositoryEventConfig): + The configuration of a trigger that creates a + build whenever an event from Repo API is + received. + """ + + resource_name: str = proto.Field( + proto.STRING, + number=34, + ) + id: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=10, + ) + name: str = proto.Field( + proto.STRING, + number=21, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=19, + ) + trigger_template: 'RepoSource' = proto.Field( + proto.MESSAGE, + number=7, + message='RepoSource', + ) + github: 'GitHubEventsConfig' = proto.Field( + proto.MESSAGE, + number=13, + message='GitHubEventsConfig', + ) + pubsub_config: 'PubsubConfig' = proto.Field( + proto.MESSAGE, + number=29, + message='PubsubConfig', + ) + webhook_config: 'WebhookConfig' = proto.Field( + proto.MESSAGE, + number=31, + message='WebhookConfig', + ) + autodetect: bool = proto.Field( + proto.BOOL, + number=18, + oneof='build_template', + ) + build: 'Build' = proto.Field( + proto.MESSAGE, + number=4, + oneof='build_template', + message='Build', + ) + filename: str = proto.Field( + proto.STRING, + number=8, + oneof='build_template', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=9, + ) + substitutions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=11, + ) + 
ignored_files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + included_files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=16, + ) + filter: str = proto.Field( + proto.STRING, + number=30, + ) + service_account: str = proto.Field( + proto.STRING, + number=33, + ) + repository_event_config: 'RepositoryEventConfig' = proto.Field( + proto.MESSAGE, + number=39, + message='RepositoryEventConfig', + ) + + +class RepositoryEventConfig(proto.Message): + r"""The configuration of a trigger that creates a build whenever + an event from Repo API is received. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + repository (str): + The resource name of the Repo API resource. + repository_type (google.cloud.devtools.cloudbuild_v1.types.RepositoryEventConfig.RepositoryType): + Output only. The type of the SCM vendor the + repository points to. + pull_request (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter): + Filter to match changes in pull requests. + + This field is a member of `oneof`_ ``filter``. + push (google.cloud.devtools.cloudbuild_v1.types.PushFilter): + Filter to match changes in refs like + branches, tags. + + This field is a member of `oneof`_ ``filter``. + """ + class RepositoryType(proto.Enum): + r"""All possible SCM repo types from Repo API. + + Values: + REPOSITORY_TYPE_UNSPECIFIED (0): + If unspecified, RepositoryType defaults to + GITHUB. + GITHUB (1): + The SCM repo is GITHUB. + GITHUB_ENTERPRISE (2): + The SCM repo is GITHUB Enterprise. + GITLAB_ENTERPRISE (3): + The SCM repo is GITLAB Enterprise. 
+ """ + REPOSITORY_TYPE_UNSPECIFIED = 0 + GITHUB = 1 + GITHUB_ENTERPRISE = 2 + GITLAB_ENTERPRISE = 3 + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + repository_type: RepositoryType = proto.Field( + proto.ENUM, + number=2, + enum=RepositoryType, + ) + pull_request: 'PullRequestFilter' = proto.Field( + proto.MESSAGE, + number=3, + oneof='filter', + message='PullRequestFilter', + ) + push: 'PushFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='filter', + message='PushFilter', + ) + + +class GitHubEventsConfig(proto.Message): + r"""GitHubEventsConfig describes the configuration of a trigger + that creates a build whenever a GitHub event is received. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + installation_id (int): + The installationID that emits the GitHub + event. + owner (str): + Owner of the repository. For example: The + owner for + https://github.com/googlecloudplatform/cloud-builders + is "googlecloudplatform". + name (str): + Name of the repository. For example: The name + for + https://github.com/googlecloudplatform/cloud-builders + is "cloud-builders". + pull_request (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter): + filter to match changes in pull requests. + + This field is a member of `oneof`_ ``event``. + push (google.cloud.devtools.cloudbuild_v1.types.PushFilter): + filter to match changes in refs like + branches, tags. + + This field is a member of `oneof`_ ``event``. 
+ """ + + installation_id: int = proto.Field( + proto.INT64, + number=1, + ) + owner: str = proto.Field( + proto.STRING, + number=6, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + pull_request: 'PullRequestFilter' = proto.Field( + proto.MESSAGE, + number=4, + oneof='event', + message='PullRequestFilter', + ) + push: 'PushFilter' = proto.Field( + proto.MESSAGE, + number=5, + oneof='event', + message='PushFilter', + ) + + +class PubsubConfig(proto.Message): + r"""PubsubConfig describes the configuration of a trigger that + creates a build whenever a Pub/Sub message is published. + + Attributes: + subscription (str): + Output only. Name of the subscription. Format is + ``projects/{project}/subscriptions/{subscription}``. + topic (str): + The name of the topic from which this subscription is + receiving messages. Format is + ``projects/{project}/topics/{topic}``. + service_account_email (str): + Service account that will make the push + request. + state (google.cloud.devtools.cloudbuild_v1.types.PubsubConfig.State): + Potential issues with the underlying Pub/Sub + subscription configuration. Only populated on + get requests. + """ + class State(proto.Enum): + r"""Enumerates potential issues with the underlying Pub/Sub + subscription configuration. + + Values: + STATE_UNSPECIFIED (0): + The subscription configuration has not been + checked. + OK (1): + The Pub/Sub subscription is properly + configured. + SUBSCRIPTION_DELETED (2): + The subscription has been deleted. + TOPIC_DELETED (3): + The topic has been deleted. + SUBSCRIPTION_MISCONFIGURED (4): + Some of the subscription's field are + misconfigured. 
+ """ + STATE_UNSPECIFIED = 0 + OK = 1 + SUBSCRIPTION_DELETED = 2 + TOPIC_DELETED = 3 + SUBSCRIPTION_MISCONFIGURED = 4 + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + topic: str = proto.Field( + proto.STRING, + number=2, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + + +class WebhookConfig(proto.Message): + r"""WebhookConfig describes the configuration of a trigger that + creates a build whenever a webhook is sent to a trigger's + webhook URL. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + secret (str): + Required. Resource name for the secret + required as a URL parameter. + + This field is a member of `oneof`_ ``auth_method``. + state (google.cloud.devtools.cloudbuild_v1.types.WebhookConfig.State): + Potential issues with the underlying Pub/Sub + subscription configuration. Only populated on + get requests. + """ + class State(proto.Enum): + r"""Enumerates potential issues with the Secret Manager secret + provided by the user. + + Values: + STATE_UNSPECIFIED (0): + The webhook auth configuration not been + checked. + OK (1): + The auth configuration is properly setup. + SECRET_DELETED (2): + The secret provided in auth_method has been deleted. + """ + STATE_UNSPECIFIED = 0 + OK = 1 + SECRET_DELETED = 2 + + secret: str = proto.Field( + proto.STRING, + number=3, + oneof='auth_method', + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + + +class PullRequestFilter(proto.Message): + r"""PullRequestFilter contains filter properties for matching + GitHub Pull Requests. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + branch (str): + Regex of branches to match. 
+ The syntax of the regular expressions accepted + is the syntax accepted by RE2 and described at + https://github.com/google/re2/wiki/Syntax + + This field is a member of `oneof`_ ``git_ref``. + comment_control (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter.CommentControl): + Configure builds to run whether a repository owner or + collaborator need to comment ``/gcbrun``. + invert_regex (bool): + If true, branches that do NOT match the git_ref will trigger + a build. + """ + class CommentControl(proto.Enum): + r"""Controls behavior of Pull Request comments. + + Values: + COMMENTS_DISABLED (0): + Do not require comments on Pull Requests + before builds are triggered. + COMMENTS_ENABLED (1): + Enforce that repository owners or + collaborators must comment on Pull Requests + before builds are triggered. + COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY (2): + Enforce that repository owners or + collaborators must comment on external + contributors' Pull Requests before builds are + triggered. + """ + COMMENTS_DISABLED = 0 + COMMENTS_ENABLED = 1 + COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY = 2 + + branch: str = proto.Field( + proto.STRING, + number=2, + oneof='git_ref', + ) + comment_control: CommentControl = proto.Field( + proto.ENUM, + number=5, + enum=CommentControl, + ) + invert_regex: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class PushFilter(proto.Message): + r"""Push contains filter properties for matching GitHub git + pushes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + branch (str): + Regexes matching branches to build. 
+ The syntax of the regular expressions accepted + is the syntax accepted by RE2 and described at + https://github.com/google/re2/wiki/Syntax + + This field is a member of `oneof`_ ``git_ref``. + tag (str): + Regexes matching tags to build. + The syntax of the regular expressions accepted + is the syntax accepted by RE2 and described at + https://github.com/google/re2/wiki/Syntax + + This field is a member of `oneof`_ ``git_ref``. + invert_regex (bool): + When true, only trigger a build if the revision regex does + NOT match the git_ref regex. + """ + + branch: str = proto.Field( + proto.STRING, + number=2, + oneof='git_ref', + ) + tag: str = proto.Field( + proto.STRING, + number=3, + oneof='git_ref', + ) + invert_regex: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class CreateBuildTriggerRequest(proto.Message): + r"""Request to create a new ``BuildTrigger``. + + Attributes: + parent (str): + The parent resource where this trigger will be created. + Format: ``projects/{project}/locations/{location}`` + project_id (str): + Required. ID of the project for which to + configure automatic builds. + trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): + Required. ``BuildTrigger`` to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + trigger: 'BuildTrigger' = proto.Field( + proto.MESSAGE, + number=2, + message='BuildTrigger', + ) + + +class GetBuildTriggerRequest(proto.Message): + r"""Returns the ``BuildTrigger`` with the specified ID. + + Attributes: + name (str): + The name of the ``Trigger`` to retrieve. Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + project_id (str): + Required. ID of the project that owns the + trigger. + trigger_id (str): + Required. Identifier (``id`` or ``name``) of the + ``BuildTrigger`` to get. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + trigger_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListBuildTriggersRequest(proto.Message): + r"""Request to list existing ``BuildTriggers``. + + Attributes: + parent (str): + The parent of the collection of ``Triggers``. Format: + ``projects/{project}/locations/{location}`` + project_id (str): + Required. ID of the project for which to list + BuildTriggers. + page_size (int): + Number of results to return in the list. + page_token (str): + Token to provide to skip to a particular spot + in the list. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListBuildTriggersResponse(proto.Message): + r"""Response containing existing ``BuildTriggers``. + + Attributes: + triggers (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildTrigger]): + ``BuildTriggers`` for the project, sorted by ``create_time`` + descending. + next_page_token (str): + Token to receive the next page of results. + """ + + @property + def raw_page(self): + return self + + triggers: MutableSequence['BuildTrigger'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BuildTrigger', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteBuildTriggerRequest(proto.Message): + r"""Request to delete a ``BuildTrigger``. + + Attributes: + name (str): + The name of the ``Trigger`` to delete. Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + project_id (str): + Required. ID of the project that owns the + trigger. + trigger_id (str): + Required. ID of the ``BuildTrigger`` to delete. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + trigger_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateBuildTriggerRequest(proto.Message): + r"""Request to update an existing ``BuildTrigger``. + + Attributes: + project_id (str): + Required. ID of the project that owns the + trigger. + trigger_id (str): + Required. ID of the ``BuildTrigger`` to update. + trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): + Required. ``BuildTrigger`` to update. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + trigger_id: str = proto.Field( + proto.STRING, + number=2, + ) + trigger: 'BuildTrigger' = proto.Field( + proto.MESSAGE, + number=3, + message='BuildTrigger', + ) + + +class BuildOptions(proto.Message): + r"""Optional arguments to enable specific features of builds. + + Attributes: + source_provenance_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Hash.HashType]): + Requested hash for SourceProvenance. + requested_verify_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.VerifyOption): + Requested verifiability options. + machine_type (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.MachineType): + Compute Engine machine type on which to run + the build. + disk_size_gb (int): + Requested disk size for the VM that runs the build. Note + that this is *NOT* "disk free"; some of the space will be + used by the operating system and build utilities. Also note + that this is the minimum disk size that will be allocated + for the build -- the build may run with a larger disk than + requested. At present, the maximum disk size is 2000GB; + builds that request more than the maximum are rejected with + an error. + substitution_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.SubstitutionOption): + Option to specify behavior when there is an error in the + substitution checks. 
+ + NOTE: this is always set to ALLOW_LOOSE for triggered builds + and cannot be overridden in the build configuration file. + dynamic_substitutions (bool): + Option to specify whether or not to apply + bash style string operations to the + substitutions. + NOTE: this is always enabled for triggered + builds and cannot be overridden in the build + configuration file. + log_streaming_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LogStreamingOption): + Option to define build log streaming behavior + to Cloud Storage. + worker_pool (str): + This field deprecated; please use ``pool.name`` instead. + pool (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.PoolOption): + Optional. Specification for execution on a ``WorkerPool``. + + See `running builds in a private + pool `__ + for more information. + logging (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LoggingMode): + Option to specify the logging mode, which + determines if and where build logs are stored. + env (MutableSequence[str]): + A list of global environment variable + definitions that will exist for all build steps + in this build. If a variable is defined in both + globally and in a build step, the variable will + use the build step value. + The elements are of the form "KEY=VALUE" for the + environment variable "KEY" being given the value + "VALUE". + secret_env (MutableSequence[str]): + A list of global environment variables, which are encrypted + using a Cloud Key Management Service crypto key. These + values must be specified in the build's ``Secret``. These + variables will be available to all build steps in this + build. + volumes (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Volume]): + Global list of volumes to mount for ALL build + steps + Each volume is created as an empty volume prior + to starting the build process. Upon completion + of the build, volumes and their contents are + discarded. 
Global volume names and paths cannot + conflict with the volumes defined a build step. + + Using a global volume in a build with only one + step is not valid as it is indicative of a build + request with an incorrect configuration. + default_logs_bucket_behavior (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.DefaultLogsBucketBehavior): + Optional. Option to specify how default logs + buckets are setup. + """ + class VerifyOption(proto.Enum): + r"""Specifies the manner in which the build should be verified, if at + all. + + If a verified build is requested, and any part of the process to + generate and upload provenance fails, the build will also fail. + + If the build does not request verification then that process may + occur, but is not guaranteed to. If it does occur and fails, the + build will not fail. + + For more information, see `Viewing Build + Provenance `__. + + Values: + NOT_VERIFIED (0): + Not a verifiable build (the default). + VERIFIED (1): + Build must be verified. + """ + NOT_VERIFIED = 0 + VERIFIED = 1 + + class MachineType(proto.Enum): + r"""Supported Compute Engine machine types. For more information, see + `Machine + types `__. + + Values: + UNSPECIFIED (0): + Standard machine type. + N1_HIGHCPU_8 (1): + Highcpu machine with 8 CPUs. + N1_HIGHCPU_32 (2): + Highcpu machine with 32 CPUs. + E2_HIGHCPU_8 (5): + Highcpu e2 machine with 8 CPUs. + E2_HIGHCPU_32 (6): + Highcpu e2 machine with 32 CPUs. + E2_MEDIUM (7): + E2 machine with 1 CPU. + """ + UNSPECIFIED = 0 + N1_HIGHCPU_8 = 1 + N1_HIGHCPU_32 = 2 + E2_HIGHCPU_8 = 5 + E2_HIGHCPU_32 = 6 + E2_MEDIUM = 7 + + class SubstitutionOption(proto.Enum): + r"""Specifies the behavior when there is an error in the + substitution checks. + + Values: + MUST_MATCH (0): + Fails the build if error in substitutions + checks, like missing a substitution in the + template or in the map. + ALLOW_LOOSE (1): + Do not fail the build if error in + substitutions checks. 
+ """ + MUST_MATCH = 0 + ALLOW_LOOSE = 1 + + class LogStreamingOption(proto.Enum): + r"""Specifies the behavior when writing build logs to Cloud + Storage. + + Values: + STREAM_DEFAULT (0): + Service may automatically determine build log + streaming behavior. + STREAM_ON (1): + Build logs should be streamed to Cloud + Storage. + STREAM_OFF (2): + Build logs should not be streamed to Cloud + Storage; they will be written when the build is + completed. + """ + STREAM_DEFAULT = 0 + STREAM_ON = 1 + STREAM_OFF = 2 + + class LoggingMode(proto.Enum): + r"""Specifies the logging mode. + + Values: + LOGGING_UNSPECIFIED (0): + The service determines the logging mode. The default is + ``LEGACY``. Do not rely on the default logging behavior as + it may change in the future. + LEGACY (1): + Build logs are stored in Cloud Logging and + Cloud Storage. + GCS_ONLY (2): + Build logs are stored in Cloud Storage. + STACKDRIVER_ONLY (3): + This option is the same as CLOUD_LOGGING_ONLY. + CLOUD_LOGGING_ONLY (5): + Build logs are stored in Cloud Logging. Selecting this + option will not allow `logs + streaming `__. + NONE (4): + Turn off all logging. No build logs will be + captured. + """ + LOGGING_UNSPECIFIED = 0 + LEGACY = 1 + GCS_ONLY = 2 + STACKDRIVER_ONLY = 3 + CLOUD_LOGGING_ONLY = 5 + NONE = 4 + + class DefaultLogsBucketBehavior(proto.Enum): + r"""Default GCS log bucket behavior options. + + Values: + DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED (0): + Unspecified. + REGIONAL_USER_OWNED_BUCKET (1): + Bucket is located in user-owned project in + the same region as the build. The builder + service account must have access to create and + write to GCS buckets in the build project. + """ + DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED = 0 + REGIONAL_USER_OWNED_BUCKET = 1 + + class PoolOption(proto.Message): + r"""Details about how a build should be executed on a ``WorkerPool``. + + See `running builds in a private + pool `__ + for more information. 
+ + Attributes: + name (str): + The ``WorkerPool`` resource to execute the build on. You + must have ``cloudbuild.workerpools.use`` on the project + hosting the WorkerPool. + + Format + projects/{project}/locations/{location}/workerPools/{workerPoolId} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + source_provenance_hash: MutableSequence['Hash.HashType'] = proto.RepeatedField( + proto.ENUM, + number=1, + enum='Hash.HashType', + ) + requested_verify_option: VerifyOption = proto.Field( + proto.ENUM, + number=2, + enum=VerifyOption, + ) + machine_type: MachineType = proto.Field( + proto.ENUM, + number=3, + enum=MachineType, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=6, + ) + substitution_option: SubstitutionOption = proto.Field( + proto.ENUM, + number=4, + enum=SubstitutionOption, + ) + dynamic_substitutions: bool = proto.Field( + proto.BOOL, + number=17, + ) + log_streaming_option: LogStreamingOption = proto.Field( + proto.ENUM, + number=5, + enum=LogStreamingOption, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=7, + ) + pool: PoolOption = proto.Field( + proto.MESSAGE, + number=19, + message=PoolOption, + ) + logging: LoggingMode = proto.Field( + proto.ENUM, + number=11, + enum=LoggingMode, + ) + env: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) + secret_env: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + volumes: MutableSequence['Volume'] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message='Volume', + ) + default_logs_bucket_behavior: DefaultLogsBucketBehavior = proto.Field( + proto.ENUM, + number=21, + enum=DefaultLogsBucketBehavior, + ) + + +class ReceiveTriggerWebhookRequest(proto.Message): + r"""ReceiveTriggerWebhookRequest [Experimental] is the request object + accepted by the ReceiveTriggerWebhook method. + + Attributes: + name (str): + The name of the ``ReceiveTriggerWebhook`` to retrieve. 
+ Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + body (google.api.httpbody_pb2.HttpBody): + HTTP request body. + project_id (str): + Project in which the specified trigger lives + trigger (str): + Name of the trigger to run the payload + against + secret (str): + Secret token used for authorization if an + OAuth token isn't provided. + """ + + name: str = proto.Field( + proto.STRING, + number=5, + ) + body: httpbody_pb2.HttpBody = proto.Field( + proto.MESSAGE, + number=1, + message=httpbody_pb2.HttpBody, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + trigger: str = proto.Field( + proto.STRING, + number=3, + ) + secret: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ReceiveTriggerWebhookResponse(proto.Message): + r"""ReceiveTriggerWebhookResponse [Experimental] is the response object + for the ReceiveTriggerWebhook method. + + """ + + +class WorkerPool(proto.Message): + r"""Configuration for a ``WorkerPool``. + + Cloud Build owns and maintains a pool of workers for general use and + have no access to a project's private network. By default, builds + submitted to Cloud Build will use a worker from this pool. + + If your build needs access to resources on a private network, create + and use a ``WorkerPool`` to run your builds. Private + ``WorkerPool``\ s give your builds access to any single VPC network + that you administer, including any on-prem resources connected to + that VPC network. For an overview of private pools, see `Private + pools + overview `__. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The resource name of the ``WorkerPool``, with + format + ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. 
+ The value of ``{worker_pool}`` is provided by + ``worker_pool_id`` in ``CreateWorkerPool`` request and the + value of ``{location}`` is determined by the endpoint + accessed. + display_name (str): + A user-specified, human-readable name for the + ``WorkerPool``. If provided, this value must be 1-63 + characters. + uid (str): + Output only. A unique identifier for the ``WorkerPool``. + annotations (MutableMapping[str, str]): + User specified annotations. See + https://google.aip.dev/128#annotations + for more details such as format and size + limitations. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the request to create the + ``WorkerPool`` was received. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the request to update the + ``WorkerPool`` was received. + delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time at which the request to delete the + ``WorkerPool`` was received. + state (google.cloud.devtools.cloudbuild_v1.types.WorkerPool.State): + Output only. ``WorkerPool`` state. + private_pool_v1_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config): + Legacy Private Pool configuration. + + This field is a member of `oneof`_ ``config``. + etag (str): + Output only. Checksum computed by the server. + May be sent on update and delete requests to + ensure that the client has an up-to-date value + before proceeding. + """ + class State(proto.Enum): + r"""State of the ``WorkerPool``. + + Values: + STATE_UNSPECIFIED (0): + State of the ``WorkerPool`` is unknown. + CREATING (1): + ``WorkerPool`` is being created. + RUNNING (2): + ``WorkerPool`` is running. + DELETING (3): + ``WorkerPool`` is being deleted: cancelling builds and + draining workers. + DELETED (4): + ``WorkerPool`` is deleted. + UPDATING (5): + ``WorkerPool`` is being updated; new builds cannot be run. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + RUNNING = 2 + DELETING = 3 + DELETED = 4 + UPDATING = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + uid: str = proto.Field( + proto.STRING, + number=3, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + private_pool_v1_config: 'PrivatePoolV1Config' = proto.Field( + proto.MESSAGE, + number=12, + oneof='config', + message='PrivatePoolV1Config', + ) + etag: str = proto.Field( + proto.STRING, + number=11, + ) + + +class PrivatePoolV1Config(proto.Message): + r"""Configuration for a V1 ``PrivatePool``. + + Attributes: + worker_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.WorkerConfig): + Machine configuration for the workers in the + pool. + network_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig): + Network configuration for the pool. + """ + + class WorkerConfig(proto.Message): + r"""Defines the configuration to be used for creating workers in + the pool. + + Attributes: + machine_type (str): + Machine type of a worker, such as ``e2-medium``. See `Worker + pool config + file `__. + If left blank, Cloud Build will use a sensible default. + disk_size_gb (int): + Size of the disk attached to the worker, in GB. See `Worker + pool config + file `__. + Specify a value of up to 2000. If ``0`` is specified, Cloud + Build will use a standard disk size. 
+ """ + + machine_type: str = proto.Field( + proto.STRING, + number=1, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=2, + ) + + class NetworkConfig(proto.Message): + r"""Defines the network configuration for the pool. + + Attributes: + peered_network (str): + Required. Immutable. The network definition that the workers + are peered to. If this section is left empty, the workers + will be peered to ``WorkerPool.project_id`` on the service + producer network. Must be in the format + ``projects/{project}/global/networks/{network}``, where + ``{project}`` is a project number, such as ``12345``, and + ``{network}`` is the name of a VPC network in the project. + See `Understanding network configuration + options `__ + egress_option (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig.EgressOption): + Option to configure network egress for the + workers. + peered_network_ip_range (str): + Immutable. Subnet IP range within the peered network. This + is specified in CIDR notation with a slash and the subnet + prefix size. You can optionally specify an IP address before + the subnet prefix value. e.g. ``192.168.0.0/29`` would + specify an IP range starting at 192.168.0.0 with a prefix + size of 29 bits. ``/16`` would specify a prefix size of 16 + bits, with an automatically determined IP within the peered + VPC. If unspecified, a value of ``/24`` will be used. + """ + class EgressOption(proto.Enum): + r"""Defines the egress option for the pool. + + Values: + EGRESS_OPTION_UNSPECIFIED (0): + If set, defaults to PUBLIC_EGRESS. + NO_PUBLIC_EGRESS (1): + If set, workers are created without any + public address, which prevents network egress to + public IPs unless a network proxy is configured. + PUBLIC_EGRESS (2): + If set, workers are created with a public + address which allows for public internet egress. 
+ """ + EGRESS_OPTION_UNSPECIFIED = 0 + NO_PUBLIC_EGRESS = 1 + PUBLIC_EGRESS = 2 + + peered_network: str = proto.Field( + proto.STRING, + number=1, + ) + egress_option: 'PrivatePoolV1Config.NetworkConfig.EgressOption' = proto.Field( + proto.ENUM, + number=2, + enum='PrivatePoolV1Config.NetworkConfig.EgressOption', + ) + peered_network_ip_range: str = proto.Field( + proto.STRING, + number=3, + ) + + worker_config: WorkerConfig = proto.Field( + proto.MESSAGE, + number=1, + message=WorkerConfig, + ) + network_config: NetworkConfig = proto.Field( + proto.MESSAGE, + number=2, + message=NetworkConfig, + ) + + +class CreateWorkerPoolRequest(proto.Message): + r"""Request to create a new ``WorkerPool``. + + Attributes: + parent (str): + Required. The parent resource where this worker pool will be + created. Format: + ``projects/{project}/locations/{location}``. + worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): + Required. ``WorkerPool`` resource to create. + worker_pool_id (str): + Required. Immutable. The ID to use for the ``WorkerPool``, + which will become the final component of the resource name. + + This value should be 1-63 characters, and valid characters + are /[a-z][0-9]-/. + validate_only (bool): + If set, validate the request and preview the + response, but do not actually post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + worker_pool: 'WorkerPool' = proto.Field( + proto.MESSAGE, + number=2, + message='WorkerPool', + ) + worker_pool_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetWorkerPoolRequest(proto.Message): + r"""Request to get a ``WorkerPool`` with the specified name. + + Attributes: + name (str): + Required. The name of the ``WorkerPool`` to retrieve. + Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteWorkerPoolRequest(proto.Message): + r"""Request to delete a ``WorkerPool``. + + Attributes: + name (str): + Required. The name of the ``WorkerPool`` to delete. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + etag (str): + Optional. If provided, it must match the + server's etag on the workerpool for the request + to be processed. + allow_missing (bool): + If set to true, and the ``WorkerPool`` is not found, the + request will succeed but no action will be taken on the + server. + validate_only (bool): + If set, validate the request and preview the + response, but do not actually post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateWorkerPoolRequest(proto.Message): + r"""Request to update a ``WorkerPool``. + + Attributes: + worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): + Required. The ``WorkerPool`` to update. + + The ``name`` field is used to identify the ``WorkerPool`` to + update. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPool}``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask specifying which fields in ``worker_pool`` to update. + validate_only (bool): + If set, validate the request and preview the + response, but do not actually post it. + """ + + worker_pool: 'WorkerPool' = proto.Field( + proto.MESSAGE, + number=1, + message='WorkerPool', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListWorkerPoolsRequest(proto.Message): + r"""Request to list ``WorkerPool``\ s. 
+ + Attributes: + parent (str): + Required. The parent of the collection of ``WorkerPools``. + Format: ``projects/{project}/locations/{location}``. + page_size (int): + The maximum number of ``WorkerPool``\ s to return. The + service may return fewer than this value. If omitted, the + server will use a sensible default. + page_token (str): + A page token, received from a previous ``ListWorkerPools`` + call. Provide this to retrieve the subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListWorkerPoolsResponse(proto.Message): + r"""Response containing existing ``WorkerPools``. + + Attributes: + worker_pools (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.WorkerPool]): + ``WorkerPools`` for the specified project. + next_page_token (str): + Continuation token used to page through large + result sets. Provide this value in a subsequent + ListWorkerPoolsRequest to return the next page + of results. + """ + + @property + def raw_page(self): + return self + + worker_pools: MutableSequence['WorkerPool'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='WorkerPool', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateWorkerPoolOperationMetadata(proto.Message): + r"""Metadata for the ``CreateWorkerPool`` operation. + + Attributes: + worker_pool (str): + The resource name of the ``WorkerPool`` to create. Format: + ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was created. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was completed. 
+ """ + + worker_pool: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateWorkerPoolOperationMetadata(proto.Message): + r"""Metadata for the ``UpdateWorkerPool`` operation. + + Attributes: + worker_pool (str): + The resource name of the ``WorkerPool`` being updated. + Format: + ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was created. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was completed. + """ + + worker_pool: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteWorkerPoolOperationMetadata(proto.Message): + r"""Metadata for the ``DeleteWorkerPool`` operation. + + Attributes: + worker_pool (str): + The resource name of the ``WorkerPool`` being deleted. + Format: + ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was created. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Time the operation was completed. 
+ """ + + worker_pool: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini new file mode 100644 index 00000000..574c5aed --- /dev/null +++ b/owl-bot-staging/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py new file mode 100644 index 00000000..e09b880c --- /dev/null +++ b/owl-bot-staging/v1/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+ALL_PYTHON = [  # interpreters the unit/mypy sessions run under
+    "3.7",
+    "3.8",
+    "3.9",
+    "3.10",
+    "3.11",
+]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8").strip()  # strip the trailing newline check_output leaves, so --package-name gets a clean value
+
+BLACK_VERSION = "black==22.3.0"
+BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
+DEFAULT_PYTHON_VERSION = "3.11"
+
+nox.sessions = [  # NOTE(review): nox reads nox.options.sessions; confirm this attribute is what the runner consumes
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",  # trailing comma is required: without it Python concatenates this literal with "docs" below into "check_lower_boundsdocs"
+    # exclude update_lower_bounds from default
+    "docs",
+    "blacken",
+    "lint",
+    "lint_setup_py",
+]
+
+@nox.session(python=ALL_PYTHON)
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/devtools/cloudbuild_v1/',
+        '--cov=tests/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py new file mode 100644 index 00000000..a8280c5e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApproveBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-build
+
+
+# [START cloudbuild_v1_generated_CloudBuild_ApproveBuild_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud.devtools import cloudbuild_v1
+
+
+async def sample_approve_build():
+    # Create a client
+    client = cloudbuild_v1.CloudBuildAsyncClient()
+
+    # Initialize request argument(s)
+    request = cloudbuild_v1.ApproveBuildRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.approve_build(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()  # AsyncOperation.result() is itself a coroutine and must be awaited
+
+    # Handle the response
+    print(response)
+
+# [END cloudbuild_v1_generated_CloudBuild_ApproveBuild_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py
new file mode 100644
index 00000000..e90be4b0
--- /dev/null
+++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApproveBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_approve_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ApproveBuildRequest( + name="name_value", + ) + + # Make the request + operation = client.approve_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py new file mode 100644 index 00000000..73320372 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CancelBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_cancel_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CancelBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = await client.cancel_build(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CancelBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py new file mode 100644 index 00000000..656b5d59 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CancelBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_cancel_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CancelBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = client.cancel_build(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CancelBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py new file mode 100644 index 00000000..07750a37 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_create_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateBuildRequest( + project_id="project_id_value", + ) + + # Make the request + operation = client.create_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py new file mode 100644 index 00000000..173aea57 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_create_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateBuildRequest( + project_id="project_id_value", + ) + + # Make the request + operation = client.create_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py new file mode 100644 index 00000000..9fe3fcdf --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_create_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.CreateBuildTriggerRequest( + project_id="project_id_value", + trigger=trigger, + ) + + # Make the request + response = await client.create_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py new file mode 100644 index 00000000..8ddcd2f3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_create_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + trigger = cloudbuild_v1.BuildTrigger() + trigger.autodetect = True + + request = cloudbuild_v1.CreateBuildTriggerRequest( + project_id="project_id_value", + trigger=trigger, + ) + + # Make the request + response = client.create_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py new file mode 100644 index 00000000..cd0a773b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_create_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateWorkerPoolRequest( + parent="parent_value", + worker_pool_id="worker_pool_id_value", + ) + + # Make the request + operation = client.create_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py new file mode 100644 index 00000000..80396e7e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_create_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.CreateWorkerPoolRequest( + parent="parent_value", + worker_pool_id="worker_pool_id_value", + ) + + # Make the request + operation = client.create_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py new file mode 100644 index 00000000..62955bcc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_delete_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + await client.delete_build_trigger(request=request) + + +# [END cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py new file mode 100644 index 00000000..249ba150 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_delete_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + client.delete_build_trigger(request=request) + + +# [END cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py new file mode 100644 index 00000000..257fa9ba --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_delete_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteWorkerPoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py new file mode 100644 index 00000000..a2b9f632 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_delete_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.DeleteWorkerPoolRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_worker_pool(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py new file mode 100644 index 00000000..585bce61 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_get_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = await client.get_build(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py new file mode 100644 index 00000000..d767fe6c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_get_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + response = client.get_build(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py new file mode 100644 index 00000000..373b419b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_get_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + response = await client.get_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py new file mode 100644 index 00000000..f2dd1102 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_get_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + response = client.get_build_trigger(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py new file mode 100644 index 00000000..1ad3016f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_get_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetWorkerPoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_worker_pool(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py new file mode 100644 index 00000000..fd50d2fd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_get_worker_pool(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetWorkerPoolRequest( + name="name_value", + ) + + # Make the request + response = client.get_worker_pool(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py new file mode 100644 index 00000000..43b21efa --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuildTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_list_build_triggers(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildTriggersRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_build_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py new file mode 100644 index 00000000..86f6e1c1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuildTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_list_build_triggers(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildTriggersRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_build_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py new file mode 100644 index 00000000..30ad36a2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuilds +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListBuilds_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_list_builds(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildsRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_builds(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListBuilds_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py new file mode 100644 index 00000000..9c2813c3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuilds +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListBuilds_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_list_builds(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListBuildsRequest( + project_id="project_id_value", + ) + + # Make the request + page_result = client.list_builds(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListBuilds_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py new file mode 100644 index 00000000..378636ad --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkerPools +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_list_worker_pools(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListWorkerPoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_worker_pools(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py new file mode 100644 index 00000000..d23cdb76 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListWorkerPools +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_list_worker_pools(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ListWorkerPoolsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_worker_pools(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py new file mode 100644 index 00000000..133c477b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReceiveTriggerWebhook +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_receive_trigger_webhook(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ReceiveTriggerWebhookRequest( + ) + + # Make the request + response = await client.receive_trigger_webhook(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py new file mode 100644 index 00000000..839f241c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReceiveTriggerWebhook +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_receive_trigger_webhook(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.ReceiveTriggerWebhookRequest( + ) + + # Make the request + response = client.receive_trigger_webhook(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py new file mode 100644 index 00000000..8c671273 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetryBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_RetryBuild_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_retry_build(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RetryBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + operation = client.retry_build(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_RetryBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py new file mode 100644 index 00000000..6b1d79f0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetryBuild +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_RetryBuild_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_retry_build(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RetryBuildRequest( + project_id="project_id_value", + id="id_value", + ) + + # Make the request + operation = client.retry_build(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_RetryBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py new file mode 100644 index 00000000..1c33cfb2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_run_build_trigger(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.RunBuildTriggerRequest( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + # Make the request + operation = client.run_build_trigger(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py new file mode 100644 index 00000000..78b1a643 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_run_build_trigger():
    """Run an existing build trigger and block until the build finishes."""
    # Instantiate the synchronous Cloud Build client.
    build_client = cloudbuild_v1.CloudBuildClient()

    # Assemble the request; the placeholder IDs must be replaced with
    # real project and trigger identifiers before running.
    run_request = cloudbuild_v1.RunBuildTriggerRequest(
        project_id="project_id_value",
        trigger_id="trigger_id_value",
    )

    # Kick off the build; this returns a long-running operation handle.
    lro = build_client.run_build_trigger(request=run_request)

    print("Waiting for operation to complete...")

    # Block until the operation completes, then report the final build.
    build_result = lro.result()
    print(build_result)
async def sample_update_build_trigger():
    """Update a build trigger via the asynchronous Cloud Build client."""
    # Asynchronous client; needs credentials and network access to run.
    async_client = cloudbuild_v1.CloudBuildAsyncClient()

    # Desired trigger state: let Cloud Build autodetect the build config.
    new_trigger = cloudbuild_v1.BuildTrigger()
    new_trigger.autodetect = True

    # Placeholder IDs must be swapped for real project/trigger values.
    update_request = cloudbuild_v1.UpdateBuildTriggerRequest(
        project_id="project_id_value",
        trigger_id="trigger_id_value",
        trigger=new_trigger,
    )

    # Await the unary RPC and print the server's view of the trigger.
    updated = await async_client.update_build_trigger(request=update_request)
    print(updated)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBuildTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_update_build_trigger():
    """Update a build trigger, enabling build-config autodetection."""
    # Synchronous Cloud Build client.
    build_client = cloudbuild_v1.CloudBuildClient()

    # Desired trigger state: autodetect the build configuration.
    new_trigger = cloudbuild_v1.BuildTrigger()
    new_trigger.autodetect = True

    # Placeholder IDs must be swapped for real project/trigger values.
    update_request = cloudbuild_v1.UpdateBuildTriggerRequest(
        project_id="project_id_value",
        trigger_id="trigger_id_value",
        trigger=new_trigger,
    )

    # Send the update and print the resulting trigger resource.
    updated = build_client.update_build_trigger(request=update_request)
    print(updated)
async def sample_update_worker_pool():
    """Update a worker pool with the async client and await completion."""
    # Create a client
    client = cloudbuild_v1.CloudBuildAsyncClient()

    # Initialize request argument(s). The empty request is a template
    # placeholder; a real call presumably needs worker_pool populated —
    # confirm against the UpdateWorkerPool API reference.
    request = cloudbuild_v1.UpdateWorkerPoolRequest(
    )

    # Make the request. The async client method returns a coroutine and
    # must be awaited to obtain the AsyncOperation handle.
    operation = await client.update_worker_pool(request=request)

    print("Waiting for operation to complete...")

    # Fix: AsyncOperation.result() is itself a coroutine. The original
    # sample wrote `response = (await operation).result()`, which leaves
    # `response` as an un-awaited coroutine object rather than the
    # finished WorkerPool message.
    response = await operation.result()

    # Handle the response
    print(response)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateWorkerPool +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_update_worker_pool():
    """Update a worker pool and wait for the long-running operation."""
    # Synchronous Cloud Build client.
    build_client = cloudbuild_v1.CloudBuildClient()

    # Empty request as in the generated template; a real call presumably
    # needs in-range field values (see the sample's own caveat above).
    update_request = cloudbuild_v1.UpdateWorkerPoolRequest()

    # Start the update; this returns a long-running operation handle.
    lro = build_client.update_worker_pool(request=update_request)

    print("Waiting for operation to complete...")

    # Block until the pool update finishes, then show the result.
    pool = lro.result()
    print(pool)
+ "type": "google.cloud.devtools.cloudbuild_v1.types.ApprovalResult" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "approve_build" + }, + "description": "Sample for ApproveBuild", + "file": "cloudbuild_v1_generated_cloud_build_approve_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ApproveBuild_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_approve_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.approve_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ApproveBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ApproveBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "approval_result", + "type": "google.cloud.devtools.cloudbuild_v1.types.ApprovalResult" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "approve_build" + }, + "description": "Sample for ApproveBuild", + "file": "cloudbuild_v1_generated_cloud_build_approve_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_approve_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.cancel_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CancelBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", + "shortName": "cancel_build" + }, + "description": "Sample for CancelBuild", + "file": "cloudbuild_v1_generated_cloud_build_cancel_build_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CancelBuild_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_cancel_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.cancel_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CancelBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", + "shortName": "cancel_build" + }, + "description": "Sample for CancelBuild", + "file": "cloudbuild_v1_generated_cloud_build_cancel_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CancelBuild_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_cancel_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "create_build_trigger" + }, + "description": "Sample for CreateBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, 
+ "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "create_build_trigger" + }, + "description": "Sample for CreateBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "build", + "type": "google.cloud.devtools.cloudbuild_v1.types.Build" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_build" + }, + "description": "Sample for CreateBuild", + "file": "cloudbuild_v1_generated_cloud_build_create_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuild_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_build", 
+ "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "build", + "type": "google.cloud.devtools.cloudbuild_v1.types.Build" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_build" + }, + "description": "Sample for CreateBuild", + "file": "cloudbuild_v1_generated_cloud_build_create_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuild_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateWorkerPool" + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "worker_pool", + "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" + }, + { + "name": "worker_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_worker_pool" + }, + "description": "Sample for CreateWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "CreateWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest" + }, + { + "name": 
"parent", + "type": "str" + }, + { + "name": "worker_pool", + "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" + }, + { + "name": "worker_pool_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_worker_pool" + }, + "description": "Sample for CreateWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.delete_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "DeleteBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_build_trigger" + }, + "description": "Sample for DeleteBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.delete_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "DeleteBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_build_trigger" + }, + "description": "Sample for DeleteBuildTrigger", + "file": 
"cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.delete_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "DeleteWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_worker_pool" + }, + "description": "Sample for DeleteWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.delete_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "DeleteWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_worker_pool" + }, + "description": "Sample for DeleteWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "get_build_trigger" + }, + "description": "Sample for GetBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + 
"shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "get_build_trigger" + }, + "description": "Sample for GetBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuild", + "service": { + 
"fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", + "shortName": "get_build" + }, + "description": "Sample for GetBuild", + "file": "cloudbuild_v1_generated_cloud_build_get_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuild_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", + "shortName": "get_build" + }, + "description": "Sample for GetBuild", + "file": "cloudbuild_v1_generated_cloud_build_get_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuild_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool", + "shortName": "get_worker_pool" + }, + "description": "Sample for 
GetWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool", + "shortName": "get_worker_pool" + }, + "description": "Sample for GetWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_build_triggers", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListBuildTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersAsyncPager", + "shortName": "list_build_triggers" + }, + "description": "Sample for ListBuildTriggers", + "file": "cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_build_triggers", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListBuildTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersPager", + "shortName": "list_build_triggers" + }, + "description": "Sample for ListBuildTriggers", + "file": "cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_builds", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListBuilds" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsAsyncPager", + "shortName": "list_builds" + }, + "description": "Sample for ListBuilds", + "file": "cloudbuild_v1_generated_cloud_build_list_builds_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuilds_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_builds_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_builds", + "method": { + "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListBuilds" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsPager", + "shortName": "list_builds" + }, + "description": "Sample for ListBuilds", + "file": "cloudbuild_v1_generated_cloud_build_list_builds_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuilds_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_builds_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_worker_pools", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListWorkerPools" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsAsyncPager", + "shortName": "list_worker_pools" + }, + "description": "Sample for ListWorkerPools", + "file": "cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_worker_pools", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ListWorkerPools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsPager", + "shortName": "list_worker_pools" + }, + "description": "Sample for ListWorkerPools", + "file": "cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.receive_trigger_webhook", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ReceiveTriggerWebhook", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ReceiveTriggerWebhook" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse", + "shortName": "receive_trigger_webhook" + }, + "description": "Sample for ReceiveTriggerWebhook", + 
"file": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.receive_trigger_webhook", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ReceiveTriggerWebhook", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "ReceiveTriggerWebhook" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse", + "shortName": "receive_trigger_webhook" + }, + "description": "Sample for ReceiveTriggerWebhook", + "file": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": 
"FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.retry_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "RetryBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "retry_build" + }, + "description": "Sample for RetryBuild", + "file": "cloudbuild_v1_generated_cloud_build_retry_build_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_RetryBuild_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_retry_build_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.retry_build", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "RetryBuild" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "retry_build" + }, + "description": "Sample for RetryBuild", + "file": "cloudbuild_v1_generated_cloud_build_retry_build_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_RetryBuild_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_retry_build_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + 
"shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.run_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "RunBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "source", + "type": "google.cloud.devtools.cloudbuild_v1.types.RepoSource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "run_build_trigger" + }, + "description": "Sample for RunBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.run_build_trigger", + "method": { + 
"fullName": "google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "RunBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "source", + "type": "google.cloud.devtools.cloudbuild_v1.types.RepoSource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "run_build_trigger" + }, + "description": "Sample for RunBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.update_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", + "service": { + "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "UpdateBuildTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "update_build_trigger" + }, + "description": "Sample for UpdateBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.update_build_trigger", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "UpdateBuildTrigger" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", + "shortName": "update_build_trigger" + }, + "description": "Sample for UpdateBuildTrigger", + "file": "cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": "CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.update_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "UpdateWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest" + }, + { + "name": "worker_pool", + "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_worker_pool" + }, + "description": "Sample for UpdateWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.update_worker_pool", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "UpdateWorkerPool" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest" + }, + { + "name": "worker_pool", + "type": 
"google.cloud.devtools.cloudbuild_v1.types.WorkerPool" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_worker_pool" + }, + "description": "Sample for UpdateWorkerPool", + "file": "cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py new file mode 100644 index 00000000..78aabad9 --- /dev/null +++ b/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py @@ -0,0 +1,193 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class cloudbuildCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'approve_build': ('name', 'approval_result', ), + 'cancel_build': ('project_id', 'id', 'name', ), + 'create_build': ('project_id', 'build', 'parent', ), + 'create_build_trigger': ('project_id', 'trigger', 'parent', ), + 'create_worker_pool': ('parent', 'worker_pool', 'worker_pool_id', 'validate_only', ), + 'delete_build_trigger': ('project_id', 'trigger_id', 'name', ), + 'delete_worker_pool': ('name', 'etag', 'allow_missing', 'validate_only', ), + 'get_build': ('project_id', 'id', 'name', ), + 'get_build_trigger': ('project_id', 'trigger_id', 'name', ), + 'get_worker_pool': ('name', ), + 'list_builds': ('project_id', 'parent', 'page_size', 'page_token', 'filter', ), + 'list_build_triggers': ('project_id', 'parent', 'page_size', 'page_token', ), + 'list_worker_pools': ('parent', 'page_size', 'page_token', ), + 'receive_trigger_webhook': ('name', 'body', 'project_id', 'trigger', 'secret', ), + 'retry_build': ('project_id', 'id', 'name', ), + 'run_build_trigger': ('project_id', 'trigger_id', 'name', 'source', ), + 'update_build_trigger': ('project_id', 'trigger_id', 'trigger', ), + 'update_worker_pool': ('worker_pool', 'update_mask', 'validate_only', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = 
original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=cloudbuildCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. 
+ tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the cloudbuild client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py new file mode 100644 index 00000000..49cab02d --- /dev/null +++ b/owl-bot-staging/v1/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-build' + + +description = "Google Cloud Build API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/devtools/cloudbuild/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-build" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud", "google.cloud.devtools"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + 
"Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt new file mode 100644 index 00000000..6c44adfe --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py new file mode 100644 index 00000000..eb31221d --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -0,0 +1,10280 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api import httpbody_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildAsyncClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from 
google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudBuildClient._get_default_mtls_endpoint(None) is None + assert CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert CloudBuildClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (CloudBuildClient, "grpc"), + (CloudBuildAsyncClient, "grpc_asyncio"), + (CloudBuildClient, "rest"), +]) +def test_cloud_build_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudbuild.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.CloudBuildGrpcTransport, "grpc"), + (transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudBuildRestTransport, "rest"), +]) +def test_cloud_build_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (CloudBuildClient, "grpc"), + (CloudBuildAsyncClient, "grpc_asyncio"), + (CloudBuildClient, "rest"), +]) +def test_cloud_build_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudbuild.googleapis.com' + ) + + +def 
test_cloud_build_client_get_transport_class(): + transport = CloudBuildClient.get_transport_class() + available_transports = [ + transports.CloudBuildGrpcTransport, + transports.CloudBuildRestTransport, + ] + assert transport in available_transports + + transport = CloudBuildClient.get_transport_class("grpc") + assert transport == transports.CloudBuildGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), +]) +@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) +@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) +def test_cloud_build_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "true"), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "false"), + 
(CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "true"), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "false"), +]) +@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) +@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_build_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + CloudBuildClient, CloudBuildAsyncClient +]) +@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) +@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) +def test_cloud_build_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), +]) +def test_cloud_build_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest", None), +]) +def test_cloud_build_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_cloud_build_client_client_options_from_dict(): + with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CloudBuildClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_cloud_build_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudbuild.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="cloudbuild.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateBuildRequest, + dict, +]) +def test_create_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + client.create_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildRequest() + +@pytest.mark.asyncio +async def test_create_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_build_async_from_dict(): + await test_create_build_async(request_type=dict) + +def test_create_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.CreateBuildRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_create_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_build( + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].build + mock_val = cloudbuild.Build(name='name_value') + assert arg == mock_val + + +def test_create_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_build( + cloudbuild.CreateBuildRequest(), + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_build( + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].build + mock_val = cloudbuild.Build(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_build( + cloudbuild.CreateBuildRequest(), + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetBuildRequest, + dict, +]) +def test_get_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + ) + response = client.get_build(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +def test_get_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + client.get_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildRequest() + +@pytest.mark.asyncio +async def test_get_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + )) + response = await client.get_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +@pytest.mark.asyncio +async def test_get_build_async_from_dict(): + await test_get_build_async(request_type=dict) + +def test_get_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.GetBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + call.return_value = cloudbuild.Build() + client.get_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_get_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.Build() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + + +def test_get_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_build( + cloudbuild.GetBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + +@pytest.mark.asyncio +async def test_get_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.Build() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_build( + cloudbuild.GetBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListBuildsRequest, + dict, +]) +def test_list_builds(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudbuild.ListBuildsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_builds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_builds_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + client.list_builds() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildsRequest() + +@pytest.mark.asyncio +async def test_list_builds_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListBuildsRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_builds(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_builds_async_from_dict(): + await test_list_builds_async(request_type=dict) + +def test_list_builds_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.ListBuildsRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + call.return_value = cloudbuild.ListBuildsResponse() + client.list_builds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_list_builds_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_builds( + project_id='project_id_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + + +def test_list_builds_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_builds( + cloudbuild.ListBuildsRequest(), + project_id='project_id_value', + filter='filter_value', + ) + +@pytest.mark.asyncio +async def test_list_builds_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_builds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_builds( + project_id='project_id_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_builds_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        await client.list_builds(
+            cloudbuild.ListBuildsRequest(),
+            project_id='project_id_value',
+            filter='filter_value',
+        )
+
+
+def test_list_builds_pager(transport_name: str = "grpc"):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_builds),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[],
+                next_page_token='def',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        pager = client.list_builds(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, cloudbuild.Build)
+                   for i in results)
+def test_list_builds_pages(transport_name: str = "grpc"):
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_builds),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[],
+                next_page_token='def',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_builds(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_builds_async_pager():
+    client = CloudBuildAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_builds),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[],
+                next_page_token='def',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudbuild.ListBuildsResponse(
+                builds=[
+                    cloudbuild.Build(),
+                    cloudbuild.Build(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_builds(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, cloudbuild.Build)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_builds_async_pages():
+    client = CloudBuildAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_builds),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildsResponse( + builds=[], + next_page_token='def', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_builds(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CancelBuildRequest, + dict, +]) +def test_cancel_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + ) + response = client.cancel_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CancelBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +def test_cancel_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + client.cancel_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CancelBuildRequest() + +@pytest.mark.asyncio +async def test_cancel_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CancelBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + )) + response = await client.cancel_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CancelBuildRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +@pytest.mark.asyncio +async def test_cancel_build_async_from_dict(): + await test_cancel_build_async(request_type=dict) + +def test_cancel_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.CancelBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + call.return_value = cloudbuild.Build() + client.cancel_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_cancel_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudbuild.Build() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + + +def test_cancel_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_build( + cloudbuild.CancelBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + +@pytest.mark.asyncio +async def test_cancel_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.Build() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_cancel_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_build( + cloudbuild.CancelBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.RetryBuildRequest, + dict, +]) +def test_retry_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.retry_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RetryBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_retry_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + client.retry_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RetryBuildRequest() + +@pytest.mark.asyncio +async def test_retry_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.RetryBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.retry_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RetryBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_retry_build_async_from_dict(): + await test_retry_build_async(request_type=dict) + +def test_retry_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudbuild.RetryBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.retry_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_retry_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.retry_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + + +def test_retry_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.retry_build( + cloudbuild.RetryBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + +@pytest.mark.asyncio +async def test_retry_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retry_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.retry_build( + project_id='project_id_value', + id='id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].id + mock_val = 'id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_retry_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.retry_build( + cloudbuild.RetryBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ApproveBuildRequest, + dict, +]) +def test_approve_build(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.approve_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ApproveBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_approve_build_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + client.approve_build() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ApproveBuildRequest() + +@pytest.mark.asyncio +async def test_approve_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ApproveBuildRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.approve_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ApproveBuildRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_approve_build_async_from_dict(): + await test_approve_build_async(request_type=dict) + +def test_approve_build_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.ApproveBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.approve_build(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_approve_build_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.approve_build( + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].approval_result + mock_val = cloudbuild.ApprovalResult(approver_account='approver_account_value') + assert arg == mock_val + + +def test_approve_build_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.approve_build( + cloudbuild.ApproveBuildRequest(), + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + +@pytest.mark.asyncio +async def test_approve_build_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.approve_build), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.approve_build( + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].approval_result + mock_val = cloudbuild.ApprovalResult(approver_account='approver_account_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_approve_build_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.approve_build( + cloudbuild.ApproveBuildRequest(), + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateBuildTriggerRequest, + dict, +]) +def test_create_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + response = client.create_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_create_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + client.create_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_create_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + )) + response = await client.create_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateBuildTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +@pytest.mark.asyncio +async def test_create_build_trigger_async_from_dict(): + await test_create_build_trigger_async(request_type=dict) + +def test_create_build_trigger_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.CreateBuildTriggerRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + call.return_value = cloudbuild.BuildTrigger() + client.create_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_create_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_build_trigger( + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') + assert arg == mock_val + + +def test_create_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_build_trigger( + cloudbuild.CreateBuildTriggerRequest(), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + +@pytest.mark.asyncio +async def test_create_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_build_trigger( + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_build_trigger( + cloudbuild.CreateBuildTriggerRequest(), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetBuildTriggerRequest, + dict, +]) +def test_get_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + response = client.get_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_get_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + client.get_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_get_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger(
+ resource_name='resource_name_value',
+ id='id_value',
+ description='description_value',
+ name='name_value',
+ tags=['tags_value'],
+ disabled=True,
+ ignored_files=['ignored_files_value'],
+ included_files=['included_files_value'],
+ filter='filter_value',
+ service_account='service_account_value',
+ ))
+ response = await client.get_build_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudbuild.GetBuildTriggerRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, cloudbuild.BuildTrigger)
+ assert response.resource_name == 'resource_name_value'
+ assert response.id == 'id_value'
+ assert response.description == 'description_value'
+ assert response.name == 'name_value'
+ assert response.tags == ['tags_value']
+ assert response.disabled is True
+ assert response.ignored_files == ['ignored_files_value']
+ assert response.included_files == ['included_files_value']
+ assert response.filter == 'filter_value'
+ assert response.service_account == 'service_account_value'
+
+
+@pytest.mark.asyncio
+async def test_get_build_trigger_async_from_dict():
+ await test_get_build_trigger_async(request_type=dict)
+
+def test_get_build_trigger_routing_parameters():
+ client = CloudBuildClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudbuild.GetBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"})
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + call.return_value = cloudbuild.BuildTrigger() + client.get_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_get_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + + +def test_get_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_build_trigger( + cloudbuild.GetBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + +@pytest.mark.asyncio +async def test_get_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_build_trigger( + cloudbuild.GetBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListBuildTriggersRequest, + dict, +]) +def test_list_build_triggers(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildTriggersResponse( + next_page_token='next_page_token_value', + ) + response = client.list_build_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListBuildTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_build_triggers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.list_build_triggers),
+ '__call__') as call:
+ client.list_build_triggers()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudbuild.ListBuildTriggersRequest()
+
+@pytest.mark.asyncio
+async def test_list_build_triggers_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListBuildTriggersRequest):
+ client = CloudBuildAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_build_triggers),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_build_triggers(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudbuild.ListBuildTriggersRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListBuildTriggersAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_build_triggers_async_from_dict():
+ await test_list_build_triggers_async(request_type=dict)
+
+def test_list_build_triggers_routing_parameters():
+ client = CloudBuildClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudbuild.ListBuildTriggersRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + call.return_value = cloudbuild.ListBuildTriggersResponse() + client.list_build_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_list_build_triggers_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_build_triggers( + project_id='project_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + + +def test_list_build_triggers_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_build_triggers( + cloudbuild.ListBuildTriggersRequest(), + project_id='project_id_value', + ) + +@pytest.mark.asyncio +async def test_list_build_triggers_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListBuildTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_build_triggers( + project_id='project_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_build_triggers_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_build_triggers( + cloudbuild.ListBuildTriggersRequest(), + project_id='project_id_value', + ) + + +def test_list_build_triggers_pager(transport_name: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_build_triggers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.BuildTrigger) + for i in results) +def test_list_build_triggers_pages(transport_name: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + RuntimeError, + ) + pages = list(client.list_build_triggers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_build_triggers_async_pager(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_build_triggers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloudbuild.BuildTrigger) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_build_triggers_async_pages(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_build_triggers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_build_triggers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + cloudbuild.DeleteBuildTriggerRequest, + dict, +]) +def test_delete_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + client.delete_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_delete_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.DeleteBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_build_trigger_async_from_dict(): + await test_delete_build_trigger_async(request_type=dict) + +def test_delete_build_trigger_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.DeleteBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + call.return_value = None + client.delete_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_delete_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + + +def test_delete_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_build_trigger( + cloudbuild.DeleteBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + +@pytest.mark.asyncio +async def test_delete_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_build_trigger( + cloudbuild.DeleteBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.UpdateBuildTriggerRequest, + dict, +]) +def test_update_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + response = client.update_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_update_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + client.update_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_update_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.UpdateBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger(
+            resource_name='resource_name_value',
+            id='id_value',
+            description='description_value',
+            name='name_value',
+            tags=['tags_value'],
+            disabled=True,
+            ignored_files=['ignored_files_value'],
+            included_files=['included_files_value'],
+            filter='filter_value',
+            service_account='service_account_value',
+        ))
+        response = await client.update_build_trigger(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == cloudbuild.UpdateBuildTriggerRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cloudbuild.BuildTrigger)
+    assert response.resource_name == 'resource_name_value'
+    assert response.id == 'id_value'
+    assert response.description == 'description_value'
+    assert response.name == 'name_value'
+    assert response.tags == ['tags_value']
+    assert response.disabled is True
+    assert response.ignored_files == ['ignored_files_value']
+    assert response.included_files == ['included_files_value']
+    assert response.filter == 'filter_value'
+    assert response.service_account == 'service_account_value'
+
+
+@pytest.mark.asyncio
+async def test_update_build_trigger_async_from_dict():
+    await test_update_build_trigger_async(request_type=dict)
+
+def test_update_build_trigger_routing_parameters():
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudbuild.UpdateBuildTriggerRequest(**{"trigger": {"resource_name": "projects/sample1/locations/sample2/triggers/sample3"}})
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + call.return_value = cloudbuild.BuildTrigger() + client.update_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_update_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') + assert arg == mock_val + + +def test_update_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_build_trigger( + cloudbuild.UpdateBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + +@pytest.mark.asyncio +async def test_update_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.BuildTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_build_trigger( + cloudbuild.UpdateBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.RunBuildTriggerRequest, + dict, +]) +def test_run_build_trigger(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.run_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RunBuildTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_run_build_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + client.run_build_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RunBuildTriggerRequest() + +@pytest.mark.asyncio +async def test_run_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.RunBuildTriggerRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.run_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.RunBuildTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_run_build_trigger_async_from_dict(): + await test_run_build_trigger_async(request_type=dict) + +def test_run_build_trigger_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.RunBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.run_build_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_run_build_trigger_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + arg = args[0].source + mock_val = cloudbuild.RepoSource(project_id='project_id_value') + assert arg == mock_val + + +def test_run_build_trigger_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.run_build_trigger( + cloudbuild.RunBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + +@pytest.mark.asyncio +async def test_run_build_trigger_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_build_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_build_trigger( + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = 'project_id_value' + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + arg = args[0].source + mock_val = cloudbuild.RepoSource(project_id='project_id_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_run_build_trigger_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.run_build_trigger( + cloudbuild.RunBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ReceiveTriggerWebhookRequest, + dict, +]) +def test_receive_trigger_webhook(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.receive_trigger_webhook), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ReceiveTriggerWebhookResponse( + ) + response = client.receive_trigger_webhook(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) + + +def test_receive_trigger_webhook_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.receive_trigger_webhook),
+            '__call__') as call:
+        client.receive_trigger_webhook()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest()
+
+@pytest.mark.asyncio
+async def test_receive_trigger_webhook_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ReceiveTriggerWebhookRequest):
+    client = CloudBuildAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.receive_trigger_webhook),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ReceiveTriggerWebhookResponse(
+        ))
+        response = await client.receive_trigger_webhook(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse)
+
+
+@pytest.mark.asyncio
+async def test_receive_trigger_webhook_async_from_dict():
+    await test_receive_trigger_webhook_async(request_type=dict)
+
+
+def test_receive_trigger_webhook_field_headers():
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = cloudbuild.ReceiveTriggerWebhookRequest() + + request.project_id = 'project_id_value' + request.trigger = 'trigger_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.receive_trigger_webhook), + '__call__') as call: + call.return_value = cloudbuild.ReceiveTriggerWebhookResponse() + client.receive_trigger_webhook(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&trigger=trigger_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_receive_trigger_webhook_field_headers_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.ReceiveTriggerWebhookRequest() + + request.project_id = 'project_id_value' + request.trigger = 'trigger_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.receive_trigger_webhook), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ReceiveTriggerWebhookResponse()) + await client.receive_trigger_webhook(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&trigger=trigger_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateWorkerPoolRequest, + dict, +]) +def test_create_worker_pool(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_worker_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + client.create_worker_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateWorkerPoolRequest() + +@pytest.mark.asyncio +async def test_create_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateWorkerPoolRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.CreateWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_worker_pool_async_from_dict(): + await test_create_worker_pool_async(request_type=dict) + +def test_create_worker_pool_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.CreateWorkerPoolRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_create_worker_pool_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_worker_pool( + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].worker_pool + mock_val = cloudbuild.WorkerPool(name='name_value') + assert arg == mock_val + arg = args[0].worker_pool_id + mock_val = 'worker_pool_id_value' + assert arg == mock_val + + +def test_create_worker_pool_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_worker_pool( + cloudbuild.CreateWorkerPoolRequest(), + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + +@pytest.mark.asyncio +async def test_create_worker_pool_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_worker_pool( + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].worker_pool + mock_val = cloudbuild.WorkerPool(name='name_value') + assert arg == mock_val + arg = args[0].worker_pool_id + mock_val = 'worker_pool_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_worker_pool_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_worker_pool( + cloudbuild.CreateWorkerPoolRequest(), + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetWorkerPoolRequest, + dict, +]) +def test_get_worker_pool(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.WorkerPool( + name='name_value', + display_name='display_name_value', + uid='uid_value', + state=cloudbuild.WorkerPool.State.CREATING, + etag='etag_value', + ) + response = client.get_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.WorkerPool) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.state == cloudbuild.WorkerPool.State.CREATING + assert response.etag == 'etag_value' + + +def test_get_worker_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+    client = CloudBuildClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_worker_pool),
+            '__call__') as call:
+        client.get_worker_pool()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudbuild.GetWorkerPoolRequest()
+
+@pytest.mark.asyncio
+async def test_get_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetWorkerPoolRequest):
+    client = CloudBuildAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_worker_pool),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool(
+            name='name_value',
+            display_name='display_name_value',
+            uid='uid_value',
+            state=cloudbuild.WorkerPool.State.CREATING,
+            etag='etag_value',
+        ))
+        response = await client.get_worker_pool(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == cloudbuild.GetWorkerPoolRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, cloudbuild.WorkerPool) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.state == cloudbuild.WorkerPool.State.CREATING + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_get_worker_pool_async_from_dict(): + await test_get_worker_pool_async(request_type=dict) + +def test_get_worker_pool_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.GetWorkerPoolRequest(**{"name": "projects/sample1/locations/sample2/workerPools/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + call.return_value = cloudbuild.WorkerPool() + client.get_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_get_worker_pool_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.WorkerPool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_worker_pool( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_worker_pool_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_worker_pool( + cloudbuild.GetWorkerPoolRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_worker_pool_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.WorkerPool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_worker_pool( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_worker_pool_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_worker_pool( + cloudbuild.GetWorkerPoolRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.DeleteWorkerPoolRequest, + dict, +]) +def test_delete_worker_pool(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_worker_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + client.delete_worker_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() + +@pytest.mark.asyncio +async def test_delete_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.DeleteWorkerPoolRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_worker_pool_async_from_dict(): + await test_delete_worker_pool_async(request_type=dict) + +def test_delete_worker_pool_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.DeleteWorkerPoolRequest(**{"name": "projects/sample1/locations/sample2/workerPools/sample3"}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_delete_worker_pool_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_worker_pool( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_worker_pool_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_worker_pool( + cloudbuild.DeleteWorkerPoolRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_worker_pool_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_worker_pool( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_worker_pool_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_worker_pool( + cloudbuild.DeleteWorkerPoolRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.UpdateWorkerPoolRequest, + dict, +]) +def test_update_worker_pool(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_worker_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + client.update_worker_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() + +@pytest.mark.asyncio +async def test_update_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.UpdateWorkerPoolRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_worker_pool_async_from_dict(): + await test_update_worker_pool_async(request_type=dict) + +def test_update_worker_pool_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.UpdateWorkerPoolRequest(**{"worker_pool": {"name": "projects/sample1/locations/sample2/workerPools/sample3"}}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_worker_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_update_worker_pool_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_worker_pool( + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].worker_pool + mock_val = cloudbuild.WorkerPool(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_worker_pool_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_worker_pool( + cloudbuild.UpdateWorkerPoolRequest(), + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_worker_pool_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_worker_pool), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_worker_pool( + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].worker_pool + mock_val = cloudbuild.WorkerPool(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_worker_pool_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_worker_pool( + cloudbuild.UpdateWorkerPoolRequest(), + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListWorkerPoolsRequest, + dict, +]) +def test_list_worker_pools(request_type, transport: str = 'grpc'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListWorkerPoolsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_worker_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListWorkerPoolsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkerPoolsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_worker_pools_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + client.list_worker_pools() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListWorkerPoolsRequest() + +@pytest.mark.asyncio +async def test_list_worker_pools_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListWorkerPoolsRequest): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_worker_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.ListWorkerPoolsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkerPoolsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_worker_pools_async_from_dict(): + await test_list_worker_pools_async(request_type=dict) + +def test_list_worker_pools_routing_parameters(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudbuild.ListWorkerPoolsRequest(**{"parent": "projects/sample1/locations/sample2"}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + call.return_value = cloudbuild.ListWorkerPoolsResponse() + client.list_worker_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + + +def test_list_worker_pools_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListWorkerPoolsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_worker_pools( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_worker_pools_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_worker_pools( + cloudbuild.ListWorkerPoolsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_worker_pools_flattened_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.ListWorkerPoolsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_worker_pools( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_worker_pools_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_worker_pools( + cloudbuild.ListWorkerPoolsRequest(), + parent='parent_value', + ) + + +def test_list_worker_pools_pager(transport_name: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_worker_pools(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.WorkerPool) + for i in results) +def test_list_worker_pools_pages(transport_name: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + RuntimeError, + ) + pages = list(client.list_worker_pools(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_worker_pools_async_pager(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_worker_pools(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloudbuild.WorkerPool) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_worker_pools_async_pages(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_worker_pools), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_worker_pools(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateBuildRequest, + dict, +]) +def test_create_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request_init["build"] = {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 
1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': 
{}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_build(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_build_rest_required_fields(request_type=cloudbuild.CreateBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parent", )) & set(("projectId", "build", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
cloudbuild.CreateBuildRequest.pb(cloudbuild.CreateBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.CreateBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request_init["build"] = {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': 
['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 
'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_build(request) + + +def test_create_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds" % client.transport._host, args[1]) + + +def test_create_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_build( + cloudbuild.CreateBuildRequest(), + project_id='project_id_value', + build=cloudbuild.Build(name='name_value'), + ) + + +def test_create_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetBuildRequest, + dict, +]) +def test_get_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_build(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +def test_get_build_rest_required_fields(request_type=cloudbuild.GetBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + 
use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["id"] = 'id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "id" in jsonified_request + assert jsonified_request["id"] == 'id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.Build() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("projectId", "id", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetBuildRequest.pb(cloudbuild.GetBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) + + request = cloudbuild.GetBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.Build() + + client.get_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_build(request) + + +def test_get_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + id='id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}" % client.transport._host, args[1]) + + +def test_get_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_build( + cloudbuild.GetBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +def test_get_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListBuildsRequest, + dict, +]) +def test_list_builds_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_builds(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_builds_rest_required_fields(request_type=cloudbuild.ListBuildsRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_builds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_builds._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "page_size", "page_token", "parent", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_builds(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_builds_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_builds._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", "parent", )) & set(("projectId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_builds_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_builds") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_builds") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListBuildsRequest.pb(cloudbuild.ListBuildsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListBuildsResponse.to_json(cloudbuild.ListBuildsResponse()) + + request = cloudbuild.ListBuildsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListBuildsResponse() + + client.list_builds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_builds_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListBuildsRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_builds(request) + + +def test_list_builds_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListBuildsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + filter='filter_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_builds(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds" % client.transport._host, args[1]) + + +def test_list_builds_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_builds( + cloudbuild.ListBuildsRequest(), + project_id='project_id_value', + filter='filter_value', + ) + + +def test_list_builds_rest_pager(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildsResponse( + builds=[], + next_page_token='def', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudbuild.ListBuildsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project_id': 'sample1'} + + pager = client.list_builds(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.Build) + for i in results) + + pages = list(client.list_builds(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CancelBuildRequest, + dict, +]) +def test_cancel_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build( + name='name_value', + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.PENDING, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + service_account='service_account_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.cancel_build(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == 'name_value' + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == 'status_detail_value' + assert response.images == ['images_value'] + assert response.logs_bucket == 'logs_bucket_value' + assert response.build_trigger_id == 'build_trigger_id_value' + assert response.log_url == 'log_url_value' + assert response.tags == ['tags_value'] + assert response.service_account == 'service_account_value' + + +def test_cancel_build_rest_required_fields(request_type=cloudbuild.CancelBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["id"] = 'id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "id" in jsonified_request + assert jsonified_request["id"] == 'id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.Build() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_cancel_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.cancel_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId", "id", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_cancel_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_cancel_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.CancelBuildRequest.pb(cloudbuild.CancelBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) + + request = cloudbuild.CancelBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.Build() + + client.cancel_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CancelBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_build(request) + + +def test_cancel_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + id='id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.cancel_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}:cancel" % client.transport._host, args[1]) + + +def test_cancel_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_build( + cloudbuild.CancelBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +def test_cancel_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.RetryBuildRequest, + dict, +]) +def test_retry_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.retry_build(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_retry_build_rest_required_fields(request_type=cloudbuild.RetryBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).retry_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["id"] = 'id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).retry_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "id" in jsonified_request + assert jsonified_request["id"] == 'id_value' + + client = CloudBuildClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.retry_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_retry_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.retry_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId", "id", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retry_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + 
client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_retry_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_retry_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.RetryBuildRequest.pb(cloudbuild.RetryBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.RetryBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.retry_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retry_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.RetryBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retry_build(request) + + +def test_retry_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + id='id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.retry_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}:retry" % client.transport._host, args[1]) + + +def test_retry_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.retry_build( + cloudbuild.RetryBuildRequest(), + project_id='project_id_value', + id='id_value', + ) + + +def test_retry_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ApproveBuildRequest, + dict, +]) +def test_approve_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/builds/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.approve_build(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_approve_build_rest_required_fields(request_type=cloudbuild.ApproveBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).approve_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).approve_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.approve_build(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_approve_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.approve_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_approve_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_approve_build") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_approve_build") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ApproveBuildRequest.pb(cloudbuild.ApproveBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.ApproveBuildRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.approve_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_approve_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ApproveBuildRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/builds/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.approve_build(request) + + +def test_approve_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/builds/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.approve_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/builds/*}:approve" % client.transport._host, args[1]) + + +def test_approve_build_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.approve_build( + cloudbuild.ApproveBuildRequest(), + name='name_value', + approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), + ) + + +def test_approve_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateBuildTriggerRequest, + dict, +]) +def test_create_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': 
{'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 
'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_build_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_create_build_trigger_rest_required_fields(request_type=cloudbuild.CreateBuildTriggerRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped 
+ + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_build_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parent", )) & set(("projectId", "trigger", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_build_trigger") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_build_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.CreateBuildTriggerRequest.pb(cloudbuild.CreateBuildTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) + + request = cloudbuild.CreateBuildTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.BuildTrigger() + + client.create_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateBuildTriggerRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 
'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 
'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} + request 
= request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_build_trigger(request) + + +def test_create_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1]) + + +def test_create_build_trigger_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_build_trigger( + cloudbuild.CreateBuildTriggerRequest(), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + +def test_create_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetBuildTriggerRequest, + dict, +]) +def test_get_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_build_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_get_build_trigger_rest_required_fields(request_type=cloudbuild.GetBuildTriggerRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["triggerId"] = 'trigger_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_build_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_build_trigger") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_build_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetBuildTriggerRequest.pb(cloudbuild.GetBuildTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) + + request = cloudbuild.GetBuildTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.BuildTrigger() + + client.get_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetBuildTriggerRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_build_trigger(request) + + +def test_get_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.BuildTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) + + +def test_get_build_trigger_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_build_trigger( + cloudbuild.GetBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + +def test_get_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListBuildTriggersRequest, + dict, +]) +def test_list_build_triggers_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_build_triggers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBuildTriggersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_build_triggers_rest_required_fields(request_type=cloudbuild.ListBuildTriggersRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_build_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_build_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", "parent", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_build_triggers(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_build_triggers_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_build_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", "parent", )) & set(("projectId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_build_triggers_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_build_triggers") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_build_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListBuildTriggersRequest.pb(cloudbuild.ListBuildTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListBuildTriggersResponse.to_json(cloudbuild.ListBuildTriggersResponse()) + + request = cloudbuild.ListBuildTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListBuildTriggersResponse() + + client.list_build_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_build_triggers_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListBuildTriggersRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_build_triggers(request) + + +def test_list_build_triggers_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_build_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1]) + + +def test_list_build_triggers_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_build_triggers( + cloudbuild.ListBuildTriggersRequest(), + project_id='project_id_value', + ) + + +def test_list_build_triggers_rest_pager(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudbuild.ListBuildTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project_id': 'sample1'} + + pager = client.list_build_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.BuildTrigger) + for i in results) + + pages = list(client.list_build_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token 
+ + +@pytest.mark.parametrize("request_type", [ + cloudbuild.DeleteBuildTriggerRequest, + dict, +]) +def test_delete_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_build_trigger(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_build_trigger_rest_required_fields(request_type=cloudbuild.DeleteBuildTriggerRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["triggerId"] = 'trigger_id_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_build_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_delete_build_trigger") as pre: + pre.assert_not_called() + pb_message = cloudbuild.DeleteBuildTriggerRequest.pb(cloudbuild.DeleteBuildTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudbuild.DeleteBuildTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + + client.delete_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.DeleteBuildTriggerRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_build_trigger(request) + + +def test_delete_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) + + +def test_delete_build_trigger_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_build_trigger( + cloudbuild.DeleteBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + ) + + +def test_delete_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.UpdateBuildTriggerRequest, + dict, +]) +def test_update_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': 
{'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 
'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger( + resource_name='resource_name_value', + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + filter='filter_value', + service_account='service_account_value', + autodetect=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_build_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == 'resource_name_value' + assert response.id == 'id_value' + assert response.description == 'description_value' + assert response.name == 'name_value' + assert response.tags == ['tags_value'] + assert response.disabled is True + assert response.ignored_files == ['ignored_files_value'] + assert response.included_files == ['included_files_value'] + assert response.filter == 'filter_value' + assert response.service_account == 'service_account_value' + + +def test_update_build_trigger_rest_required_fields(request_type=cloudbuild.UpdateBuildTriggerRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify 
fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["triggerId"] = 'trigger_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_build_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.update_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectId", "triggerId", "trigger", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_update_build_trigger") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_update_build_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.UpdateBuildTriggerRequest.pb(cloudbuild.UpdateBuildTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body":
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) + + request = cloudbuild.UpdateBuildTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.BuildTrigger() + + client.update_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.UpdateBuildTriggerRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 
'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 
'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 
'pull_request': {}, 'push': {}}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_build_trigger(request) + + +def test_update_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) + + +def test_update_build_trigger_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_build_trigger( + cloudbuild.UpdateBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), + ) + + +def test_update_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.RunBuildTriggerRequest, + dict, +]) +def test_run_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request_init["source"] = {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.run_build_trigger(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_run_build_trigger_rest_required_fields(request_type=cloudbuild.RunBuildTriggerRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = 'project_id_value' + jsonified_request["triggerId"] = 'trigger_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == 'project_id_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.run_build_trigger(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_run_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.run_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_run_build_trigger") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_run_build_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.RunBuildTriggerRequest.pb(cloudbuild.RunBuildTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, +
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.RunBuildTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.run_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.RunBuildTriggerRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} + request_init["source"] = {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_build_trigger(request) + + +def test_run_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.run_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}:run" % client.transport._host, args[1]) + + +def test_run_build_trigger_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.run_build_trigger( + cloudbuild.RunBuildTriggerRequest(), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), + ) + + +def test_run_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ReceiveTriggerWebhookRequest, + dict, +]) +def test_receive_trigger_webhook_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'trigger': 'sample2'} + request_init["body"] = {'content_type': 'content_type_value', 'data': b'data_blob', 'extensions': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ReceiveTriggerWebhookResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ReceiveTriggerWebhookResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.receive_trigger_webhook(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_receive_trigger_webhook_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_receive_trigger_webhook") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_receive_trigger_webhook") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ReceiveTriggerWebhookRequest.pb(cloudbuild.ReceiveTriggerWebhookRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ReceiveTriggerWebhookResponse.to_json(cloudbuild.ReceiveTriggerWebhookResponse()) + + request = cloudbuild.ReceiveTriggerWebhookRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ReceiveTriggerWebhookResponse() + + client.receive_trigger_webhook(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_receive_trigger_webhook_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ReceiveTriggerWebhookRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = 
{'project_id': 'sample1', 'trigger': 'sample2'} + request_init["body"] = {'content_type': 'content_type_value', 'data': b'data_blob', 'extensions': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.receive_trigger_webhook(request) + + +def test_receive_trigger_webhook_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.CreateWorkerPoolRequest, + dict, +]) +def test_create_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["worker_pool"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_worker_pool(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_worker_pool_rest_required_fields(request_type=cloudbuild.CreateWorkerPoolRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["worker_pool_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "workerPoolId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workerPoolId" in jsonified_request + assert jsonified_request["workerPoolId"] == request_init["worker_pool_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["workerPoolId"] = 'worker_pool_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("validate_only", "worker_pool_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "workerPoolId" in jsonified_request + assert jsonified_request["workerPoolId"] == 'worker_pool_id_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_worker_pool(request) + + expected_params = [ + ( + "workerPoolId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", "workerPoolId", )) & set(("parent", "workerPool", "workerPoolId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_worker_pool") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_worker_pool") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.CreateWorkerPoolRequest.pb(cloudbuild.CreateWorkerPoolRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.CreateWorkerPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateWorkerPoolRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["worker_pool"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_worker_pool(request) + + +def test_create_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workerPools" % client.transport._host, args[1]) + + +def test_create_worker_pool_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_worker_pool( + cloudbuild.CreateWorkerPoolRequest(), + parent='parent_value', + worker_pool=cloudbuild.WorkerPool(name='name_value'), + worker_pool_id='worker_pool_id_value', + ) + + +def test_create_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.GetWorkerPoolRequest, + dict, +]) +def test_get_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.WorkerPool( + name='name_value', + display_name='display_name_value', + uid='uid_value', + state=cloudbuild.WorkerPool.State.CREATING, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_worker_pool(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.WorkerPool) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.uid == 'uid_value' + assert response.state == cloudbuild.WorkerPool.State.CREATING + assert response.etag == 'etag_value' + + +def test_get_worker_pool_rest_required_fields(request_type=cloudbuild.GetWorkerPoolRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.WorkerPool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_worker_pool(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_worker_pool") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_worker_pool") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetWorkerPoolRequest.pb(cloudbuild.GetWorkerPoolRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.WorkerPool.to_json(cloudbuild.WorkerPool()) + + request = cloudbuild.GetWorkerPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.WorkerPool() + + client.get_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetWorkerPoolRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_worker_pool(request) + + +def test_get_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.WorkerPool() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) + + +def test_get_worker_pool_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_worker_pool( + cloudbuild.GetWorkerPoolRequest(), + name='name_value', + ) + + +def test_get_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.DeleteWorkerPoolRequest, + dict, +]) +def test_delete_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_worker_pool(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_worker_pool_rest_required_fields(request_type=cloudbuild.DeleteWorkerPoolRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_worker_pool(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_delete_worker_pool") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_delete_worker_pool") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
cloudbuild.DeleteWorkerPoolRequest.pb(cloudbuild.DeleteWorkerPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.DeleteWorkerPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.DeleteWorkerPoolRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_worker_pool(request) + + +def test_delete_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) + + +def test_delete_worker_pool_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_worker_pool( + cloudbuild.DeleteWorkerPoolRequest(), + name='name_value', + ) + + +def test_delete_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.UpdateWorkerPoolRequest, + dict, +]) +def test_update_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} + request_init["worker_pool"] = {'name': 'projects/sample1/locations/sample2/workerPools/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_worker_pool(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_worker_pool_rest_required_fields(request_type=cloudbuild.UpdateWorkerPoolRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_worker_pool(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("workerPool", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_update_worker_pool") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_update_worker_pool") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.UpdateWorkerPoolRequest.pb(cloudbuild.UpdateWorkerPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloudbuild.UpdateWorkerPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.UpdateWorkerPoolRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} + request_init["worker_pool"] = {'name': 'projects/sample1/locations/sample2/workerPools/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_worker_pool(request) + + +def test_update_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) + + +def test_update_worker_pool_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_worker_pool( + cloudbuild.UpdateWorkerPoolRequest(), + worker_pool=cloudbuild.WorkerPool(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloudbuild.ListWorkerPoolsRequest, + dict, +]) +def test_list_worker_pools_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListWorkerPoolsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_worker_pools(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkerPoolsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_worker_pools_rest_required_fields(request_type=cloudbuild.ListWorkerPoolsRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_worker_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_worker_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListWorkerPoolsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_worker_pools(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_worker_pools_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_worker_pools._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_worker_pools_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_worker_pools") as post, \ + mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_worker_pools") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListWorkerPoolsRequest.pb(cloudbuild.ListWorkerPoolsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListWorkerPoolsResponse.to_json(cloudbuild.ListWorkerPoolsResponse()) + + request = cloudbuild.ListWorkerPoolsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListWorkerPoolsResponse() + + client.list_worker_pools(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_worker_pools_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListWorkerPoolsRequest): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_worker_pools(request) + + +def test_list_worker_pools_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListWorkerPoolsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_worker_pools(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workerPools" % client.transport._host, args[1]) + + +def test_list_worker_pools_rest_flattened_error(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_worker_pools( + cloudbuild.ListWorkerPoolsRequest(), + parent='parent_value', + ) + + +def test_list_worker_pools_rest_pager(transport: str = 'rest'): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token='abc', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token='def', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token='ghi', + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudbuild.ListWorkerPoolsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_worker_pools(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.WorkerPool) + for i in results) + + pages = list(client.list_worker_pools(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudBuildClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudBuildGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CloudBuildGrpcTransport, + transports.CloudBuildGrpcAsyncIOTransport, + transports.CloudBuildRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = CloudBuildClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudBuildGrpcTransport, + ) + +def test_cloud_build_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudBuildTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_build_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudBuildTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_build', + 'get_build', + 'list_builds', + 'cancel_build', + 'retry_build', + 'approve_build', + 'create_build_trigger', + 'get_build_trigger', + 'list_build_triggers', + 'delete_build_trigger', + 'update_build_trigger', + 'run_build_trigger', + 'receive_trigger_webhook', + 'create_worker_pool', + 'get_worker_pool', + 'delete_worker_pool', + 'update_worker_pool', + 'list_worker_pools', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_build_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudBuildTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 
'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_cloud_build_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudBuildTransport() + adc.assert_called_once() + + +def test_cloud_build_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudBuildClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudBuildGrpcTransport, + transports.CloudBuildGrpcAsyncIOTransport, + ], +) +def test_cloud_build_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudBuildGrpcTransport, + transports.CloudBuildGrpcAsyncIOTransport, + transports.CloudBuildRestTransport, + ], +) +def test_cloud_build_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudBuildGrpcTransport, grpc_helpers), + (transports.CloudBuildGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_build_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudbuild.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudbuild.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) +def test_cloud_build_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_cloud_build_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.CloudBuildRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_cloud_build_rest_lro_client(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_build_host_no_port(transport_name): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudbuild.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_build_host_with_port(transport_name): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudbuild.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudbuild.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_cloud_build_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudBuildClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudBuildClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_build._session + session2 = client2.transport.create_build._session + assert session1 != session2 + session1 = client1.transport.get_build._session + session2 = client2.transport.get_build._session + assert session1 != session2 + session1 = client1.transport.list_builds._session + session2 = client2.transport.list_builds._session + assert session1 != session2 + session1 = client1.transport.cancel_build._session + session2 = 
client2.transport.cancel_build._session + assert session1 != session2 + session1 = client1.transport.retry_build._session + session2 = client2.transport.retry_build._session + assert session1 != session2 + session1 = client1.transport.approve_build._session + session2 = client2.transport.approve_build._session + assert session1 != session2 + session1 = client1.transport.create_build_trigger._session + session2 = client2.transport.create_build_trigger._session + assert session1 != session2 + session1 = client1.transport.get_build_trigger._session + session2 = client2.transport.get_build_trigger._session + assert session1 != session2 + session1 = client1.transport.list_build_triggers._session + session2 = client2.transport.list_build_triggers._session + assert session1 != session2 + session1 = client1.transport.delete_build_trigger._session + session2 = client2.transport.delete_build_trigger._session + assert session1 != session2 + session1 = client1.transport.update_build_trigger._session + session2 = client2.transport.update_build_trigger._session + assert session1 != session2 + session1 = client1.transport.run_build_trigger._session + session2 = client2.transport.run_build_trigger._session + assert session1 != session2 + session1 = client1.transport.receive_trigger_webhook._session + session2 = client2.transport.receive_trigger_webhook._session + assert session1 != session2 + session1 = client1.transport.create_worker_pool._session + session2 = client2.transport.create_worker_pool._session + assert session1 != session2 + session1 = client1.transport.get_worker_pool._session + session2 = client2.transport.get_worker_pool._session + assert session1 != session2 + session1 = client1.transport.delete_worker_pool._session + session2 = client2.transport.delete_worker_pool._session + assert session1 != session2 + session1 = client1.transport.update_worker_pool._session + session2 = client2.transport.update_worker_pool._session + assert session1 != session2 + session1 = 
client1.transport.list_worker_pools._session + session2 = client2.transport.list_worker_pools._session + assert session1 != session2 +def test_cloud_build_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudBuildGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_build_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudBuildGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) +def test_cloud_build_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) +def test_cloud_build_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cloud_build_grpc_lro_client(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cloud_build_grpc_lro_async_client(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_build_path(): + project = "squid" + build = "clam" + expected = "projects/{project}/builds/{build}".format(project=project, build=build, ) + actual = CloudBuildClient.build_path(project, build) + assert expected == actual + + +def test_parse_build_path(): + expected = { + "project": "whelk", + "build": "octopus", + } + path = CloudBuildClient.build_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_build_path(path) + assert expected == actual + +def test_build_trigger_path(): + project = "oyster" + trigger = "nudibranch" + expected = "projects/{project}/triggers/{trigger}".format(project=project, trigger=trigger, ) + actual = CloudBuildClient.build_trigger_path(project, trigger) + assert expected == actual + + +def test_parse_build_trigger_path(): + expected = { + "project": "cuttlefish", + "trigger": "mussel", + } + path = CloudBuildClient.build_trigger_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_build_trigger_path(path) + assert expected == actual + +def test_crypto_key_path(): + project = "winkle" + location = "nautilus" + keyring = "scallop" + key = "abalone" + expected = "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format(project=project, location=location, keyring=keyring, key=key, ) + actual = CloudBuildClient.crypto_key_path(project, location, keyring, key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "squid", + "location": "clam", + "keyring": "whelk", + "key": "octopus", + } + path = CloudBuildClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_crypto_key_path(path) + assert expected == actual + +def test_network_path(): + project = "oyster" + network = "nudibranch" + expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + actual = CloudBuildClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "cuttlefish", + "network": "mussel", + } + path = CloudBuildClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_network_path(path) + assert expected == actual + +def test_repository_path(): + project = "winkle" + location = "nautilus" + connection = "scallop" + repository = "abalone" + expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) + actual = CloudBuildClient.repository_path(project, location, connection, repository) + assert expected == actual + + +def test_parse_repository_path(): + expected = { + "project": "squid", + "location": "clam", + "connection": "whelk", + "repository": "octopus", + } + path = CloudBuildClient.repository_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_repository_path(path) + assert expected == actual + +def test_secret_version_path(): + project = "oyster" + secret = "nudibranch" + version = "cuttlefish" + expected = "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) + actual = CloudBuildClient.secret_version_path(project, secret, version) + assert expected == actual + + +def test_parse_secret_version_path(): + expected = { + "project": "mussel", + "secret": "winkle", + "version": "nautilus", + } + path = CloudBuildClient.secret_version_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_secret_version_path(path) + assert expected == actual + +def test_service_account_path(): + project = "scallop" + service_account = "abalone" + expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + actual = CloudBuildClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "squid", + "service_account": "clam", + } + path = CloudBuildClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_service_account_path(path) + assert expected == actual + +def test_subscription_path(): + project = "whelk" + subscription = "octopus" + expected = "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) + actual = CloudBuildClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "oyster", + "subscription": "nudibranch", + } + path = CloudBuildClient.subscription_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_subscription_path(path) + assert expected == actual + +def test_topic_path(): + project = "cuttlefish" + topic = "mussel" + expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + actual = CloudBuildClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "winkle", + "topic": "nautilus", + } + path = CloudBuildClient.topic_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_topic_path(path) + assert expected == actual + +def test_worker_pool_path(): + project = "scallop" + location = "abalone" + worker_pool = "squid" + expected = "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(project=project, location=location, worker_pool=worker_pool, ) + actual = CloudBuildClient.worker_pool_path(project, location, worker_pool) + assert expected == actual + + +def test_parse_worker_pool_path(): + expected = { + "project": "clam", + "location": "whelk", + "worker_pool": "octopus", + } + path = CloudBuildClient.worker_pool_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_worker_pool_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudBuildClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = CloudBuildClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudBuildClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = CloudBuildClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudBuildClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = CloudBuildClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = CloudBuildClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = CloudBuildClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudBuildClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudBuildClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = CloudBuildClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudBuildTransport, '_prep_wrapped_messages') as prep: + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudBuildTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudBuildClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudBuildAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with 
mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc new file mode 100644 index 00000000..a0cf72db --- /dev/null +++ b/owl-bot-staging/v2/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/devtools/cloudbuild/__init__.py + google/cloud/devtools/cloudbuild/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git 
a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v2/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in new file mode 100644 index 00000000..6f731ec0 --- /dev/null +++ b/owl-bot-staging/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/devtools/cloudbuild *.py +recursive-include google/cloud/devtools/cloudbuild_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst new file mode 100644 index 00000000..c788a1b3 --- /dev/null +++ b/owl-bot-staging/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Devtools Cloudbuild API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Devtools Cloudbuild API. +4. `Setup Authentication.`_ + +.. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst new file mode 100644 index 00000000..f4d9c5e2 --- /dev/null +++ b/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst @@ -0,0 +1,10 @@ +RepositoryManager +----------------------------------- + +.. automodule:: google.cloud.devtools.cloudbuild_v2.services.repository_manager + :members: + :inherited-members: + +.. automodule:: google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst new file mode 100644 index 00000000..c055be1a --- /dev/null +++ b/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Devtools Cloudbuild v2 API +==================================================== +.. 
toctree:: + :maxdepth: 2 + + repository_manager diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst new file mode 100644 index 00000000..2148aa78 --- /dev/null +++ b/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Devtools Cloudbuild v2 API +================================================= + +.. automodule:: google.cloud.devtools.cloudbuild_v2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py new file mode 100644 index 00000000..4bd8e2dd --- /dev/null +++ b/owl-bot-staging/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-build documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-build" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Devtools Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-build-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-build.tex", + u"google-cloud-build Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-build", + u"Google Cloud Devtools Cloudbuild Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-build", + u"google-cloud-build Documentation", + author, + "google-cloud-build", + "GAPIC library for Google Cloud Devtools Cloudbuild API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst new file mode 100644 index 00000000..476758ee --- /dev/null +++ b/owl-bot-staging/v2/docs/index.rst 
@@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + cloudbuild_v2/services + cloudbuild_v2/types diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py new file mode 100644 index 00000000..47a5d13c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.devtools.cloudbuild import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.devtools.cloudbuild_v2.services.repository_manager.client import RepositoryManagerClient +from google.cloud.devtools.cloudbuild_v2.services.repository_manager.async_client import RepositoryManagerAsyncClient + +from google.cloud.devtools.cloudbuild_v2.types.cloudbuild import OperationMetadata +from google.cloud.devtools.cloudbuild_v2.types.cloudbuild import RunWorkflowCustomOperationMetadata +from google.cloud.devtools.cloudbuild_v2.types.repositories import BatchCreateRepositoriesRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import BatchCreateRepositoriesResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import Connection +from google.cloud.devtools.cloudbuild_v2.types.repositories import CreateConnectionRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import CreateRepositoryRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import DeleteConnectionRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import DeleteRepositoryRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchGitRefsRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchGitRefsResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchLinkableRepositoriesRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchLinkableRepositoriesResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadTokenRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadTokenResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadWriteTokenRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadWriteTokenResponse +from 
google.cloud.devtools.cloudbuild_v2.types.repositories import GetConnectionRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import GetRepositoryRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import GitHubConfig +from google.cloud.devtools.cloudbuild_v2.types.repositories import GitHubEnterpriseConfig +from google.cloud.devtools.cloudbuild_v2.types.repositories import GitLabConfig +from google.cloud.devtools.cloudbuild_v2.types.repositories import InstallationState +from google.cloud.devtools.cloudbuild_v2.types.repositories import ListConnectionsRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import ListConnectionsResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import ListRepositoriesRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import ListRepositoriesResponse +from google.cloud.devtools.cloudbuild_v2.types.repositories import OAuthCredential +from google.cloud.devtools.cloudbuild_v2.types.repositories import ProcessWebhookRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import Repository +from google.cloud.devtools.cloudbuild_v2.types.repositories import ServiceDirectoryConfig +from google.cloud.devtools.cloudbuild_v2.types.repositories import UpdateConnectionRequest +from google.cloud.devtools.cloudbuild_v2.types.repositories import UserCredential + +__all__ = ('RepositoryManagerClient', + 'RepositoryManagerAsyncClient', + 'OperationMetadata', + 'RunWorkflowCustomOperationMetadata', + 'BatchCreateRepositoriesRequest', + 'BatchCreateRepositoriesResponse', + 'Connection', + 'CreateConnectionRequest', + 'CreateRepositoryRequest', + 'DeleteConnectionRequest', + 'DeleteRepositoryRequest', + 'FetchGitRefsRequest', + 'FetchGitRefsResponse', + 'FetchLinkableRepositoriesRequest', + 'FetchLinkableRepositoriesResponse', + 'FetchReadTokenRequest', + 'FetchReadTokenResponse', + 'FetchReadWriteTokenRequest', + 'FetchReadWriteTokenResponse', + 
'GetConnectionRequest', + 'GetRepositoryRequest', + 'GitHubConfig', + 'GitHubEnterpriseConfig', + 'GitLabConfig', + 'InstallationState', + 'ListConnectionsRequest', + 'ListConnectionsResponse', + 'ListRepositoriesRequest', + 'ListRepositoriesResponse', + 'OAuthCredential', + 'ProcessWebhookRequest', + 'Repository', + 'ServiceDirectoryConfig', + 'UpdateConnectionRequest', + 'UserCredential', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py new file mode 100644 index 00000000..360a0d13 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed new file mode 100644 index 00000000..6070c14c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-build package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py new file mode 100644 index 00000000..6745dc72 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.repository_manager import RepositoryManagerClient +from .services.repository_manager import RepositoryManagerAsyncClient + +from .types.cloudbuild import OperationMetadata +from .types.cloudbuild import RunWorkflowCustomOperationMetadata +from .types.repositories import BatchCreateRepositoriesRequest +from .types.repositories import BatchCreateRepositoriesResponse +from .types.repositories import Connection +from .types.repositories import CreateConnectionRequest +from .types.repositories import CreateRepositoryRequest +from .types.repositories import DeleteConnectionRequest +from .types.repositories import DeleteRepositoryRequest +from .types.repositories import FetchGitRefsRequest +from .types.repositories import FetchGitRefsResponse +from .types.repositories import FetchLinkableRepositoriesRequest +from .types.repositories import FetchLinkableRepositoriesResponse +from .types.repositories import FetchReadTokenRequest +from 
.types.repositories import FetchReadTokenResponse +from .types.repositories import FetchReadWriteTokenRequest +from .types.repositories import FetchReadWriteTokenResponse +from .types.repositories import GetConnectionRequest +from .types.repositories import GetRepositoryRequest +from .types.repositories import GitHubConfig +from .types.repositories import GitHubEnterpriseConfig +from .types.repositories import GitLabConfig +from .types.repositories import InstallationState +from .types.repositories import ListConnectionsRequest +from .types.repositories import ListConnectionsResponse +from .types.repositories import ListRepositoriesRequest +from .types.repositories import ListRepositoriesResponse +from .types.repositories import OAuthCredential +from .types.repositories import ProcessWebhookRequest +from .types.repositories import Repository +from .types.repositories import ServiceDirectoryConfig +from .types.repositories import UpdateConnectionRequest +from .types.repositories import UserCredential + +__all__ = ( + 'RepositoryManagerAsyncClient', +'BatchCreateRepositoriesRequest', +'BatchCreateRepositoriesResponse', +'Connection', +'CreateConnectionRequest', +'CreateRepositoryRequest', +'DeleteConnectionRequest', +'DeleteRepositoryRequest', +'FetchGitRefsRequest', +'FetchGitRefsResponse', +'FetchLinkableRepositoriesRequest', +'FetchLinkableRepositoriesResponse', +'FetchReadTokenRequest', +'FetchReadTokenResponse', +'FetchReadWriteTokenRequest', +'FetchReadWriteTokenResponse', +'GetConnectionRequest', +'GetRepositoryRequest', +'GitHubConfig', +'GitHubEnterpriseConfig', +'GitLabConfig', +'InstallationState', +'ListConnectionsRequest', +'ListConnectionsResponse', +'ListRepositoriesRequest', +'ListRepositoriesResponse', +'OAuthCredential', +'OperationMetadata', +'ProcessWebhookRequest', +'Repository', +'RepositoryManagerClient', +'RunWorkflowCustomOperationMetadata', +'ServiceDirectoryConfig', +'UpdateConnectionRequest', +'UserCredential', +) diff --git 
a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json new file mode 100644 index 00000000..2e77ddd0 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json @@ -0,0 +1,238 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.devtools.cloudbuild_v2", + "protoPackage": "google.devtools.cloudbuild.v2", + "schema": "1.0", + "services": { + "RepositoryManager": { + "clients": { + "grpc": { + "libraryClient": "RepositoryManagerClient", + "rpcs": { + "BatchCreateRepositories": { + "methods": [ + "batch_create_repositories" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateRepository": { + "methods": [ + "create_repository" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteRepository": { + "methods": [ + "delete_repository" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableRepositories": { + "methods": [ + "fetch_linkable_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetRepository": { + "methods": [ + "get_repository" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListRepositories": { + "methods": [ + "list_repositories" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + }, + "grpc-async": { + "libraryClient": "RepositoryManagerAsyncClient", + "rpcs": { + "BatchCreateRepositories": { + "methods": [ + "batch_create_repositories" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateRepository": { + "methods": [ + "create_repository" + ] 
+ }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteRepository": { + "methods": [ + "delete_repository" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableRepositories": { + "methods": [ + "fetch_linkable_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetRepository": { + "methods": [ + "get_repository" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListRepositories": { + "methods": [ + "list_repositories" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + }, + "rest": { + "libraryClient": "RepositoryManagerClient", + "rpcs": { + "BatchCreateRepositories": { + "methods": [ + "batch_create_repositories" + ] + }, + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "CreateRepository": { + "methods": [ + "create_repository" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "DeleteRepository": { + "methods": [ + "delete_repository" + ] + }, + "FetchGitRefs": { + "methods": [ + "fetch_git_refs" + ] + }, + "FetchLinkableRepositories": { + "methods": [ + "fetch_linkable_repositories" + ] + }, + "FetchReadToken": { + "methods": [ + "fetch_read_token" + ] + }, + "FetchReadWriteToken": { + "methods": [ + "fetch_read_write_token" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetRepository": { + "methods": [ + "get_repository" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "ListRepositories": { + "methods": [ + "list_repositories" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py 
b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py new file mode 100644 index 00000000..360a0d13 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed new file mode 100644 index 00000000..6070c14c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py new file mode 100644 index 00000000..89a37dc9 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py new file mode 100644 index 00000000..4477dbda --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import RepositoryManagerClient +from .async_client import RepositoryManagerAsyncClient + +__all__ = ( + 'RepositoryManagerClient', + 'RepositoryManagerAsyncClient', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py new file mode 100644 index 00000000..f0355efe --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py @@ -0,0 +1,2257 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers +from google.cloud.devtools.cloudbuild_v2.types import cloudbuild +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport +from .client import RepositoryManagerClient + + +class RepositoryManagerAsyncClient: + """Manages connections to source code repositories.""" + + _client: RepositoryManagerClient + + DEFAULT_ENDPOINT = RepositoryManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = RepositoryManagerClient.DEFAULT_MTLS_ENDPOINT + + connection_path = 
staticmethod(RepositoryManagerClient.connection_path) + parse_connection_path = staticmethod(RepositoryManagerClient.parse_connection_path) + repository_path = staticmethod(RepositoryManagerClient.repository_path) + parse_repository_path = staticmethod(RepositoryManagerClient.parse_repository_path) + secret_version_path = staticmethod(RepositoryManagerClient.secret_version_path) + parse_secret_version_path = staticmethod(RepositoryManagerClient.parse_secret_version_path) + service_path = staticmethod(RepositoryManagerClient.service_path) + parse_service_path = staticmethod(RepositoryManagerClient.parse_service_path) + common_billing_account_path = staticmethod(RepositoryManagerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(RepositoryManagerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(RepositoryManagerClient.common_folder_path) + parse_common_folder_path = staticmethod(RepositoryManagerClient.parse_common_folder_path) + common_organization_path = staticmethod(RepositoryManagerClient.common_organization_path) + parse_common_organization_path = staticmethod(RepositoryManagerClient.parse_common_organization_path) + common_project_path = staticmethod(RepositoryManagerClient.common_project_path) + parse_common_project_path = staticmethod(RepositoryManagerClient.parse_common_project_path) + common_location_path = staticmethod(RepositoryManagerClient.common_location_path) + parse_common_location_path = staticmethod(RepositoryManagerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RepositoryManagerAsyncClient: The constructed client. 
+ """ + return RepositoryManagerClient.from_service_account_info.__func__(RepositoryManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RepositoryManagerAsyncClient: The constructed client. + """ + return RepositoryManagerClient.from_service_account_file.__func__(RepositoryManagerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return RepositoryManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> RepositoryManagerTransport: + """Returns the transport used by the client instance. + + Returns: + RepositoryManagerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(RepositoryManagerClient).get_transport_class, type(RepositoryManagerClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, RepositoryManagerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the repository manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.RepositoryManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = RepositoryManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_connection(self, + request: Optional[Union[repositories.CreateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection: Optional[repositories.Connection] = None, + connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_create_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest, dict]]): + The request object. Message for creating a Connection + parent (:class:`str`): + Required. Project and location where the connection will + be created. Format: ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (:class:`google.cloud.devtools.cloudbuild_v2.types.Connection`): + Required. The Connection to create. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_id (:class:`str`): + Required. The ID to use for the Connection, which will + become the final component of the Connection's resource + name. Names must be unique per-project per-location. + Allows alphanumeric characters and any of + -._~%!$&'()*+,;=@. + + This corresponds to the ``connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection, connection_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.CreateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + if connection_id is not None: + request.connection_id = connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + repositories.Connection, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_connection(self, + request: Optional[Union[repositories.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.Connection: + r"""Gets details of a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_get_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest, dict]]): + The request object. Message for getting the details of a + Connection. + name (:class:`str`): + Required. The name of the Connection to retrieve. + Format: ``projects/*/locations/*/connections/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.types.Connection: + A connection to a SCM like GitHub, + GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.GetConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_connection, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_connections(self, + request: Optional[Union[repositories.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsAsyncPager: + r"""Lists Connections in a given project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_list_connections(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest, dict]]): + The request object. Message for requesting list of + Connections. + parent (:class:`str`): + Required. The parent, which owns this collection of + Connections. Format: ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsAsyncPager: + Message for response to listing + Connections. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.ListConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_connections, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_connection(self, + request: Optional[Union[repositories.UpdateConnectionRequest, dict]] = None, + *, + connection: Optional[repositories.Connection] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a single connection. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_update_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.UpdateConnectionRequest( + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest, dict]]): + The request object. Message for updating a Connection. + connection (:class:`google.cloud.devtools.cloudbuild_v2.types.Connection`): + Required. The Connection to update. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to be updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.UpdateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection.name", request.connection.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + repositories.Connection, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_connection(self, + request: Optional[Union[repositories.DeleteConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_delete_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest, dict]]): + The request object. Message for deleting a Connection. + name (:class:`str`): + Required. The name of the Connection to delete. Format: + ``projects/*/locations/*/connections/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.DeleteConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_repository(self, + request: Optional[Union[repositories.CreateRepositoryRequest, dict]] = None, + *, + parent: Optional[str] = None, + repository: Optional[repositories.Repository] = None, + repository_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_create_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + repository = cloudbuild_v2.Repository() + repository.remote_uri = "remote_uri_value" + + request = cloudbuild_v2.CreateRepositoryRequest( + parent="parent_value", + repository=repository, + repository_id="repository_id_value", + ) + + # Make the request + operation = client.create_repository(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest, dict]]): + The request object. Message for creating a Repository. + parent (:class:`str`): + Required. The connection to contain + the repository. If the request is part + of a BatchCreateRepositoriesRequest, + this field should be empty or match the + parent specified there. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + repository (:class:`google.cloud.devtools.cloudbuild_v2.types.Repository`): + Required. The repository to create. + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + repository_id (:class:`str`): + Required. The ID to use for the repository, which will + become the final component of the repository's resource + name. This ID should be unique in the connection. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + + This corresponds to the ``repository_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v2.types.Repository` + A repository associated to a parent connection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, repository, repository_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.CreateRepositoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if repository is not None: + request.repository = repository + if repository_id is not None: + request.repository_id = repository_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_repository, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + repositories.Repository, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + async def batch_create_repositories(self, + request: Optional[Union[repositories.BatchCreateRepositoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + requests: Optional[MutableSequence[repositories.CreateRepositoryRequest]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates multiple repositories inside a connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_batch_create_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + requests = cloudbuild_v2.CreateRepositoryRequest() + requests.parent = "parent_value" + requests.repository.remote_uri = "remote_uri_value" + requests.repository_id = "repository_id_value" + + request = cloudbuild_v2.BatchCreateRepositoriesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_repositories(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest, dict]]): + The request object. Message for creating repositoritories + in batch. + parent (:class:`str`): + Required. The connection to contain all the repositories + being created. Format: + projects/\ */locations/*/connections/\* The parent field + in the CreateRepositoryRequest messages must either be + empty or match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]`): + Required. The request messages + specifying the repositories to create. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesResponse` + Message for response of creating repositories in batch. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.BatchCreateRepositoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_repositories, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + repositories.BatchCreateRepositoriesResponse, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_repository(self, + request: Optional[Union[repositories.GetRepositoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.Repository: + r"""Gets details of a single repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_get_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetRepositoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_repository(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest, dict]]): + The request object. Message for getting the details of a + Repository. + name (:class:`str`): + Required. The name of the Repository to retrieve. + Format: + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.types.Repository: + A repository associated to a parent + connection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.GetRepositoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_repository, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_repositories(self, + request: Optional[Union[repositories.ListRepositoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRepositoriesAsyncPager: + r"""Lists Repositories in a given connection. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_list_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListRepositoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest, dict]]): + The request object. Message for requesting list of + Repositories. + parent (:class:`str`): + Required. The parent, which owns this collection of + Repositories. Format: + ``projects/*/locations/*/connections/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesAsyncPager: + Message for response to listing + Repositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.ListRepositoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_repositories, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRepositoriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_repository(self, + request: Optional[Union[repositories.DeleteRepositoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single repository. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_delete_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteRepositoryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_repository(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest, dict]]): + The request object. Message for deleting a Repository. + name (:class:`str`): + Required. The name of the Repository to delete. Format: + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.DeleteRepositoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_repository, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + async def fetch_read_write_token(self, + request: Optional[Union[repositories.FetchReadWriteTokenRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchReadWriteTokenResponse: + r"""Fetches read/write token of a given repository. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_fetch_read_write_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadWriteTokenRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest, dict]]): + The request object. Message for fetching SCM read/write + token. + repository (:class:`str`): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.FetchReadWriteTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_read_write_token, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_read_token(self, + request: Optional[Union[repositories.FetchReadTokenRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchReadTokenResponse: + r"""Fetches read token of a given repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_fetch_read_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadTokenRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_read_token(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest, dict]]): + The request object. Message for fetching SCM read token. + repository (:class:`str`): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse: + Message for responding to get read + token. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.FetchReadTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_read_token, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_linkable_repositories(self, + request: Optional[Union[repositories.FetchLinkableRepositoriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchLinkableRepositoriesAsyncPager: + r"""FetchLinkableRepositories get repositories from SCM + that are accessible and could be added to the + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_fetch_linkable_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchLinkableRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest, dict]]): + The request object. Request message for + FetchLinkableRepositories. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesAsyncPager: + Response message for + FetchLinkableRepositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = repositories.FetchLinkableRepositoriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_linkable_repositories, + default_retry=retries.Retry( +initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection", request.connection), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchLinkableRepositoriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_git_refs(self, + request: Optional[Union[repositories.FetchGitRefsRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchGitRefsResponse: + r"""Fetch the list of branches or tags for a given + repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + async def sample_fetch_git_refs(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchGitRefsRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_git_refs(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest, dict]]): + The request object. Request for fetching git refs + repository (:class:`str`): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse: + Response for fetching git refs + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = repositories.FetchGitRefsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_git_refs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "RepositoryManagerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RepositoryManagerAsyncClient", +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py new file mode 100644 index 00000000..79743795 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py @@ -0,0 +1,2445 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
class RepositoryManagerClientMeta(type):
    """Metaclass for the RepositoryManager client.

    Provides class-level helpers for building and retrieving support
    objects (e.g. transport) without polluting client instances.
    """

    # Registry mapping transport labels to transport classes. Insertion
    # order matters: the first entry is the default transport.
    _transport_registry = OrderedDict()  # type: Dict[str, Type[RepositoryManagerTransport]]
    _transport_registry["grpc"] = RepositoryManagerGrpcTransport
    _transport_registry["grpc_asyncio"] = RepositoryManagerGrpcAsyncIOTransport
    _transport_registry["rest"] = RepositoryManagerRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[RepositoryManagerTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # An explicit label selects that transport directly; otherwise fall
        # back to the first registered (default) transport.
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RepositoryManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RepositoryManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RepositoryManagerTransport: + """Returns the transport used by the client instance. + + Returns: + RepositoryManagerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def connection_path(project: str,location: str,connection: str,) -> str: + """Returns a fully-qualified connection string.""" + return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) + + @staticmethod + def parse_connection_path(path: str) -> Dict[str,str]: + """Parses a connection path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def repository_path(project: str,location: str,connection: str,repository: str,) -> str: + """Returns a fully-qualified repository string.""" + return "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) + + @staticmethod + def parse_repository_path(path: str) -> Dict[str,str]: + """Parses a repository path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)/repositories/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def secret_version_path(project: str,secret: str,version: str,) -> str: + """Returns a fully-qualified secret_version string.""" + return "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) + + @staticmethod + def parse_secret_version_path(path: str) -> Dict[str,str]: + """Parses a secret_version path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_path(project: str,location: str,namespace: str,service: str,) -> str: + """Returns a fully-qualified service string.""" + return 
"projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format(project=project, location=location, namespace=namespace, service=service, ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str,str]: + """Parses a service path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/namespaces/(?P.+?)/services/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project 
path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RepositoryManagerTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the repository manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RepositoryManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RepositoryManagerTransport): + # transport is a RepositoryManagerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_connection(self, + request: Optional[Union[repositories.CreateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection: Optional[repositories.Connection] = None, + connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_create_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest, dict]): + The request object. Message for creating a Connection + parent (str): + Required. Project and location where the connection will + be created. Format: ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (google.cloud.devtools.cloudbuild_v2.types.Connection): + Required. The Connection to create. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_id (str): + Required. The ID to use for the Connection, which will + become the final component of the Connection's resource + name. Names must be unique per-project per-location. + Allows alphanumeric characters and any of + -._~%!$&'()*+,;=@. + + This corresponds to the ``connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection, connection_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.CreateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.CreateConnectionRequest): + request = repositories.CreateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + if connection_id is not None: + request.connection_id = connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + repositories.Connection, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_connection(self, + request: Optional[Union[repositories.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.Connection: + r"""Gets details of a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_get_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest, dict]): + The request object. Message for getting the details of a + Connection. + name (str): + Required. The name of the Connection to retrieve. + Format: ``projects/*/locations/*/connections/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.Connection: + A connection to a SCM like GitHub, + GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.GetConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.GetConnectionRequest): + request = repositories.GetConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_connections(self, + request: Optional[Union[repositories.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsPager: + r"""Lists Connections in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_list_connections(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest, dict]): + The request object. Message for requesting list of + Connections. + parent (str): + Required. The parent, which owns this collection of + Connections. Format: ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsPager: + Message for response to listing + Connections. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.ListConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.ListConnectionsRequest): + request = repositories.ListConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_connection(self, + request: Optional[Union[repositories.UpdateConnectionRequest, dict]] = None, + *, + connection: Optional[repositories.Connection] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_update_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.UpdateConnectionRequest( + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest, dict]): + The request object. Message for updating a Connection. + connection (google.cloud.devtools.cloudbuild_v2.types.Connection): + Required. The Connection to update. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or + GitLab. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([connection, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.UpdateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.UpdateConnectionRequest): + request = repositories.UpdateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if connection is not None: + request.connection = connection + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection.name", request.connection.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + repositories.Connection, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_connection(self, + request: Optional[Union[repositories.DeleteConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_delete_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest, dict]): + The request object. Message for deleting a Connection. + name (str): + Required. The name of the Connection to delete. Format: + ``projects/*/locations/*/connections/*``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.DeleteConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.DeleteConnectionRequest): + request = repositories.DeleteConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_repository(self, + request: Optional[Union[repositories.CreateRepositoryRequest, dict]] = None, + *, + parent: Optional[str] = None, + repository: Optional[repositories.Repository] = None, + repository_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_create_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + repository = cloudbuild_v2.Repository() + repository.remote_uri = "remote_uri_value" + + request = cloudbuild_v2.CreateRepositoryRequest( + parent="parent_value", + repository=repository, + repository_id="repository_id_value", + ) + + # Make the request + operation = client.create_repository(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest, dict]): + The request object. Message for creating a Repository. + parent (str): + Required. The connection to contain + the repository. If the request is part + of a BatchCreateRepositoriesRequest, + this field should be empty or match the + parent specified there. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + repository (google.cloud.devtools.cloudbuild_v2.types.Repository): + Required. The repository to create. + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + repository_id (str): + Required. The ID to use for the repository, which will + become the final component of the repository's resource + name. This ID should be unique in the connection. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + + This corresponds to the ``repository_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v2.types.Repository` + A repository associated to a parent connection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, repository, repository_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.CreateRepositoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.CreateRepositoryRequest): + request = repositories.CreateRepositoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if repository is not None: + request.repository = repository + if repository_id is not None: + request.repository_id = repository_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_repository] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + repositories.Repository, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def batch_create_repositories(self, + request: Optional[Union[repositories.BatchCreateRepositoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + requests: Optional[MutableSequence[repositories.CreateRepositoryRequest]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates multiple repositories inside a connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_batch_create_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + requests = cloudbuild_v2.CreateRepositoryRequest() + requests.parent = "parent_value" + requests.repository.remote_uri = "remote_uri_value" + requests.repository_id = "repository_id_value" + + request = cloudbuild_v2.BatchCreateRepositoriesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_repositories(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest, dict]): + The request object. Message for creating repositoritories + in batch. + parent (str): + Required. The connection to contain all the repositories + being created. Format: + projects/\ */locations/*/connections/\* The parent field + in the CreateRepositoryRequest messages must either be + empty or match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]): + Required. The request messages + specifying the repositories to create. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesResponse` + Message for response of creating repositories in batch. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.BatchCreateRepositoriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.BatchCreateRepositoriesRequest): + request = repositories.BatchCreateRepositoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_repositories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + repositories.BatchCreateRepositoriesResponse, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_repository(self, + request: Optional[Union[repositories.GetRepositoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.Repository: + r"""Gets details of a single repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_get_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetRepositoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_repository(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest, dict]): + The request object. Message for getting the details of a + Repository. + name (str): + Required. The name of the Repository to retrieve. + Format: + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.Repository: + A repository associated to a parent + connection. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.GetRepositoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.GetRepositoryRequest): + request = repositories.GetRepositoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_repository] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_repositories(self, + request: Optional[Union[repositories.ListRepositoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRepositoriesPager: + r"""Lists Repositories in a given connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_list_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListRepositoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest, dict]): + The request object. Message for requesting list of + Repositories. + parent (str): + Required. The parent, which owns this collection of + Repositories. Format: + ``projects/*/locations/*/connections/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesPager: + Message for response to listing + Repositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.ListRepositoriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.ListRepositoriesRequest): + request = repositories.ListRepositoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_repositories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRepositoriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_repository(self, + request: Optional[Union[repositories.DeleteRepositoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_delete_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteRepositoryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_repository(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest, dict]): + The request object. Message for deleting a Repository. + name (str): + Required. The name of the Repository to delete. Format: + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.DeleteRepositoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.DeleteRepositoryRequest): + request = repositories.DeleteRepositoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_repository] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloudbuild.OperationMetadata, + ) + + # Done; return the response. + return response + + def fetch_read_write_token(self, + request: Optional[Union[repositories.FetchReadWriteTokenRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchReadWriteTokenResponse: + r"""Fetches read/write token of a given repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_fetch_read_write_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadWriteTokenRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest, dict]): + The request object. Message for fetching SCM read/write + token. + repository (str): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.FetchReadWriteTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.FetchReadWriteTokenRequest): + request = repositories.FetchReadWriteTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_read_write_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def fetch_read_token(self, + request: Optional[Union[repositories.FetchReadTokenRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchReadTokenResponse: + r"""Fetches read token of a given repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_fetch_read_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadTokenRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_read_token(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest, dict]): + The request object. Message for fetching SCM read token. + repository (str): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse: + Message for responding to get read + token. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.FetchReadTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.FetchReadTokenRequest): + request = repositories.FetchReadTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_read_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def fetch_linkable_repositories(self, + request: Optional[Union[repositories.FetchLinkableRepositoriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchLinkableRepositoriesPager: + r"""FetchLinkableRepositories get repositories from SCM + that are accessible and could be added to the + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_fetch_linkable_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchLinkableRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest, dict]): + The request object. Request message for + FetchLinkableRepositories. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesPager: + Response message for + FetchLinkableRepositories. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a repositories.FetchLinkableRepositoriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, repositories.FetchLinkableRepositoriesRequest): + request = repositories.FetchLinkableRepositoriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_linkable_repositories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("connection", request.connection), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchLinkableRepositoriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_git_refs(self, + request: Optional[Union[repositories.FetchGitRefsRequest, dict]] = None, + *, + repository: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repositories.FetchGitRefsResponse: + r"""Fetch the list of branches or tags for a given + repository. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v2 + + def sample_fetch_git_refs(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchGitRefsRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_git_refs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest, dict]): + The request object. Request for fetching git refs + repository (str): + Required. The resource name of the repository in the + format + ``projects/*/locations/*/connections/*/repositories/*``. + + This corresponds to the ``repository`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse: + Response for fetching git refs + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([repository]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a repositories.FetchGitRefsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, repositories.FetchGitRefsRequest): + request = repositories.FetchGitRefsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if repository is not None: + request.repository = repository + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_git_refs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("repository", request.repository), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RepositoryManagerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RepositoryManagerClient", +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py new file mode 100644 index 00000000..1ae879b6 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.devtools.cloudbuild_v2.types import repositories + + +class ListConnectionsPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., repositories.ListConnectionsResponse], + request: repositories.ListConnectionsRequest, + response: repositories.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = repositories.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[repositories.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[repositories.Connection]: + for page in self.pages: + yield from page.connections + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListConnectionsAsyncPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connections`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[repositories.ListConnectionsResponse]], + request: repositories.ListConnectionsRequest, + response: repositories.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = repositories.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[repositories.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[repositories.Connection]: + async def async_generator(): + async for page in self.pages: + for response in page.connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListRepositoriesPager: + """A pager for iterating through ``list_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``repositories`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRepositories`` requests and continue to iterate + through the ``repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., repositories.ListRepositoriesResponse], + request: repositories.ListRepositoriesRequest, + response: repositories.ListRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = repositories.ListRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[repositories.ListRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[repositories.Repository]: + for page in self.pages: + yield from page.repositories + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListRepositoriesAsyncPager: + """A pager for iterating through ``list_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``repositories`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRepositories`` requests and continue to iterate + through the ``repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[repositories.ListRepositoriesResponse]], + request: repositories.ListRepositoriesRequest, + response: repositories.ListRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = repositories.ListRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[repositories.ListRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[repositories.Repository]: + async def async_generator(): + async for page in self.pages: + for response in page.repositories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class FetchLinkableRepositoriesPager: + """A pager for iterating through ``fetch_linkable_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``repositories`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``FetchLinkableRepositories`` requests and continue to iterate + through the ``repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., repositories.FetchLinkableRepositoriesResponse], + request: repositories.FetchLinkableRepositoriesRequest, + response: repositories.FetchLinkableRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = repositories.FetchLinkableRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[repositories.FetchLinkableRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[repositories.Repository]: + for page in self.pages: + yield from page.repositories + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class FetchLinkableRepositoriesAsyncPager: + """A pager for iterating through ``fetch_linkable_repositories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``repositories`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchLinkableRepositories`` requests and continue to iterate + through the ``repositories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[repositories.FetchLinkableRepositoriesResponse]], + request: repositories.FetchLinkableRepositoriesRequest, + response: repositories.FetchLinkableRepositoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest): + The initial request object. + response (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = repositories.FetchLinkableRepositoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[repositories.FetchLinkableRepositoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[repositories.Repository]: + async def async_generator(): + async for page in self.pages: + for response in page.repositories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py new file mode 100644 index 00000000..b912a799 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import RepositoryManagerTransport
from .grpc import RepositoryManagerGrpcTransport
from .grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport
from .rest import RepositoryManagerRestInterceptor, RepositoryManagerRestTransport

# Registry of the concrete transport classes, keyed by the name clients use
# to select a transport ("grpc", "grpc_asyncio", "rest").
_transport_registry: Dict[str, Type[RepositoryManagerTransport]] = OrderedDict(
    (
        ('grpc', RepositoryManagerGrpcTransport),
        ('grpc_asyncio', RepositoryManagerGrpcAsyncIOTransport),
        ('rest', RepositoryManagerRestTransport),
    )
)

__all__ = (
    'RepositoryManagerTransport',
    'RepositoryManagerGrpcTransport',
    'RepositoryManagerGrpcAsyncIOTransport',
    'RepositoryManagerRestTransport',
    'RepositoryManagerRestInterceptor',
)
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Abstract transport layer for the RepositoryManager service."""
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.devtools.cloudbuild_v2.types import repositories
from google.cloud.location import locations_pb2  # type: ignore
from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
# Fix: operations_pb2 was imported twice in the generated file; one import.
from google.longrunning import operations_pb2  # type: ignore

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__)

# Retry policy shared by the idempotent read-style RPCs (get/list/fetch):
# exponential backoff from 1s up to 10s (x1.3) on UNAVAILABLE, giving up
# after 60s. Retry objects are immutable, so sharing one instance is safe.
_READ_RETRY = retries.Retry(
    initial=1.0,
    maximum=10.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,
)


class RepositoryManagerTransport(abc.ABC):
    """Abstract transport class for RepositoryManager.

    Concrete subclasses (gRPC, gRPC asyncio, REST) implement the RPC
    properties below; this base class handles credential resolution and
    per-method retry/timeout wrapping.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'cloudbuild.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for
                billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT
                should be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, determine the appropriate defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host)

        # If the credentials are service account credentials, then always
        # try to use a self-signed JWT.
        if (always_use_jwt_access
                and isinstance(credentials, service_account.Credentials)
                and hasattr(service_account.Credentials,
                            "with_always_use_jwt_access")):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        """Precompute the wrapped methods with per-RPC retry/timeout defaults.

        The (retry, timeout) policy for each RPC is data-driven rather than
        repeated inline, so the shared read-retry policy is stated once.
        """
        wrap = gapic_v1.method.wrap_method
        # (rpc, default_retry, default_timeout) triples. ``None`` retry means
        # no automatic retry; ``None`` timeout means no default deadline.
        policies = (
            (self.create_connection, None, 60.0),
            (self.get_connection, _READ_RETRY, 60.0),
            (self.list_connections, _READ_RETRY, 60.0),
            (self.update_connection, None, 60.0),
            (self.delete_connection, None, 60.0),
            (self.create_repository, None, 60.0),
            (self.batch_create_repositories, None, None),
            (self.get_repository, _READ_RETRY, 60.0),
            (self.list_repositories, _READ_RETRY, 60.0),
            (self.delete_repository, None, 60.0),
            (self.fetch_read_write_token, _READ_RETRY, 60.0),
            (self.fetch_read_token, _READ_RETRY, 60.0),
            (self.fetch_linkable_repositories, _READ_RETRY, 60.0),
            (self.fetch_git_refs, None, None),
        )
        self._wrapped_methods = {
            rpc: wrap(
                rpc,
                default_retry=retry,
                default_timeout=timeout,
                client_info=client_info,
            )
            for rpc, retry, timeout in policies
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    @property
    def create_connection(self) -> Callable[
            [repositories.CreateConnectionRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get_connection(self) -> Callable[
            [repositories.GetConnectionRequest],
            Union[
                repositories.Connection,
                Awaitable[repositories.Connection]
            ]]:
        raise NotImplementedError()

    @property
    def list_connections(self) -> Callable[
            [repositories.ListConnectionsRequest],
            Union[
                repositories.ListConnectionsResponse,
                Awaitable[repositories.ListConnectionsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def update_connection(self) -> Callable[
            [repositories.UpdateConnectionRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def delete_connection(self) -> Callable[
            [repositories.DeleteConnectionRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def create_repository(self) -> Callable[
            [repositories.CreateRepositoryRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def batch_create_repositories(self) -> Callable[
            [repositories.BatchCreateRepositoriesRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get_repository(self) -> Callable[
            [repositories.GetRepositoryRequest],
            Union[
                repositories.Repository,
                Awaitable[repositories.Repository]
            ]]:
        raise NotImplementedError()

    @property
    def list_repositories(self) -> Callable[
            [repositories.ListRepositoriesRequest],
            Union[
                repositories.ListRepositoriesResponse,
                Awaitable[repositories.ListRepositoriesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def delete_repository(self) -> Callable[
            [repositories.DeleteRepositoryRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def fetch_read_write_token(self) -> Callable[
            [repositories.FetchReadWriteTokenRequest],
            Union[
                repositories.FetchReadWriteTokenResponse,
                Awaitable[repositories.FetchReadWriteTokenResponse]
            ]]:
        raise NotImplementedError()

    @property
    def fetch_read_token(self) -> Callable[
            [repositories.FetchReadTokenRequest],
            Union[
                repositories.FetchReadTokenResponse,
                Awaitable[repositories.FetchReadTokenResponse]
            ]]:
        raise NotImplementedError()

    @property
    def fetch_linkable_repositories(self) -> Callable[
            [repositories.FetchLinkableRepositoriesRequest],
            Union[
                repositories.FetchLinkableRepositoriesResponse,
                Awaitable[repositories.FetchLinkableRepositoriesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def fetch_git_refs(self) -> Callable[
            [repositories.FetchGitRefsRequest],
            Union[
                repositories.FetchGitRefsResponse,
                Awaitable[repositories.FetchGitRefsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def cancel_operation(
        self,
    ) -> Callable[
        [operations_pb2.CancelOperationRequest],
        None,
    ]:
        raise NotImplementedError()

    @property
    def set_iam_policy(
        self,
    ) -> Callable[
        [iam_policy_pb2.SetIamPolicyRequest],
        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
    ]:
        raise NotImplementedError()

    @property
    def get_iam_policy(
        self,
    ) -> Callable[
        [iam_policy_pb2.GetIamPolicyRequest],
        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
    ]:
        raise NotImplementedError()

    @property
    def test_iam_permissions(
        self,
    ) -> Callable[
        [iam_policy_pb2.TestIamPermissionsRequest],
        Union[
            iam_policy_pb2.TestIamPermissionsResponse,
            Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()


__all__ = (
    'RepositoryManagerTransport',
)
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO + + +class RepositoryManagerGrpcTransport(RepositoryManagerTransport): + """gRPC backend transport for RepositoryManager. + + Manages connections to source code repositories. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self._operations_client + + @property + def create_connection(self) -> Callable[ + [repositories.CreateConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the create connection method over gRPC. + + Creates a Connection. + + Returns: + Callable[[~.CreateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection' not in self._stubs: + self._stubs['create_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/CreateConnection', + request_serializer=repositories.CreateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_connection'] + + @property + def get_connection(self) -> Callable[ + [repositories.GetConnectionRequest], + repositories.Connection]: + r"""Return a callable for the get connection method over gRPC. + + Gets details of a single connection. + + Returns: + Callable[[~.GetConnectionRequest], + ~.Connection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_connection' not in self._stubs: + self._stubs['get_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/GetConnection', + request_serializer=repositories.GetConnectionRequest.serialize, + response_deserializer=repositories.Connection.deserialize, + ) + return self._stubs['get_connection'] + + @property + def list_connections(self) -> Callable[ + [repositories.ListConnectionsRequest], + repositories.ListConnectionsResponse]: + r"""Return a callable for the list connections method over gRPC. + + Lists Connections in a given project and location. + + Returns: + Callable[[~.ListConnectionsRequest], + ~.ListConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_connections' not in self._stubs: + self._stubs['list_connections'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/ListConnections', + request_serializer=repositories.ListConnectionsRequest.serialize, + response_deserializer=repositories.ListConnectionsResponse.deserialize, + ) + return self._stubs['list_connections'] + + @property + def update_connection(self) -> Callable[ + [repositories.UpdateConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the update connection method over gRPC. + + Updates a single connection. + + Returns: + Callable[[~.UpdateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_connection' not in self._stubs: + self._stubs['update_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/UpdateConnection', + request_serializer=repositories.UpdateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_connection'] + + @property + def delete_connection(self) -> Callable[ + [repositories.DeleteConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete connection method over gRPC. + + Deletes a single connection. + + Returns: + Callable[[~.DeleteConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection' not in self._stubs: + self._stubs['delete_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteConnection', + request_serializer=repositories.DeleteConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_connection'] + + @property + def create_repository(self) -> Callable[ + [repositories.CreateRepositoryRequest], + operations_pb2.Operation]: + r"""Return a callable for the create repository method over gRPC. + + Creates a Repository. + + Returns: + Callable[[~.CreateRepositoryRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_repository' not in self._stubs: + self._stubs['create_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/CreateRepository', + request_serializer=repositories.CreateRepositoryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_repository'] + + @property + def batch_create_repositories(self) -> Callable[ + [repositories.BatchCreateRepositoriesRequest], + operations_pb2.Operation]: + r"""Return a callable for the batch create repositories method over gRPC. + + Creates multiple repositories inside a connection. + + Returns: + Callable[[~.BatchCreateRepositoriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_create_repositories' not in self._stubs: + self._stubs['batch_create_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/BatchCreateRepositories', + request_serializer=repositories.BatchCreateRepositoriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['batch_create_repositories'] + + @property + def get_repository(self) -> Callable[ + [repositories.GetRepositoryRequest], + repositories.Repository]: + r"""Return a callable for the get repository method over gRPC. + + Gets details of a single repository. + + Returns: + Callable[[~.GetRepositoryRequest], + ~.Repository]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_repository' not in self._stubs: + self._stubs['get_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/GetRepository', + request_serializer=repositories.GetRepositoryRequest.serialize, + response_deserializer=repositories.Repository.deserialize, + ) + return self._stubs['get_repository'] + + @property + def list_repositories(self) -> Callable[ + [repositories.ListRepositoriesRequest], + repositories.ListRepositoriesResponse]: + r"""Return a callable for the list repositories method over gRPC. + + Lists Repositories in a given connection. + + Returns: + Callable[[~.ListRepositoriesRequest], + ~.ListRepositoriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_repositories' not in self._stubs: + self._stubs['list_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/ListRepositories', + request_serializer=repositories.ListRepositoriesRequest.serialize, + response_deserializer=repositories.ListRepositoriesResponse.deserialize, + ) + return self._stubs['list_repositories'] + + @property + def delete_repository(self) -> Callable[ + [repositories.DeleteRepositoryRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete repository method over gRPC. + + Deletes a single repository. + + Returns: + Callable[[~.DeleteRepositoryRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_repository' not in self._stubs: + self._stubs['delete_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteRepository', + request_serializer=repositories.DeleteRepositoryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_repository'] + + @property + def fetch_read_write_token(self) -> Callable[ + [repositories.FetchReadWriteTokenRequest], + repositories.FetchReadWriteTokenResponse]: + r"""Return a callable for the fetch read write token method over gRPC. + + Fetches read/write token of a given repository. + + Returns: + Callable[[~.FetchReadWriteTokenRequest], + ~.FetchReadWriteTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_read_write_token' not in self._stubs: + self._stubs['fetch_read_write_token'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadWriteToken', + request_serializer=repositories.FetchReadWriteTokenRequest.serialize, + response_deserializer=repositories.FetchReadWriteTokenResponse.deserialize, + ) + return self._stubs['fetch_read_write_token'] + + @property + def fetch_read_token(self) -> Callable[ + [repositories.FetchReadTokenRequest], + repositories.FetchReadTokenResponse]: + r"""Return a callable for the fetch read token method over gRPC. + + Fetches read token of a given repository. + + Returns: + Callable[[~.FetchReadTokenRequest], + ~.FetchReadTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_read_token' not in self._stubs: + self._stubs['fetch_read_token'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadToken', + request_serializer=repositories.FetchReadTokenRequest.serialize, + response_deserializer=repositories.FetchReadTokenResponse.deserialize, + ) + return self._stubs['fetch_read_token'] + + @property + def fetch_linkable_repositories(self) -> Callable[ + [repositories.FetchLinkableRepositoriesRequest], + repositories.FetchLinkableRepositoriesResponse]: + r"""Return a callable for the fetch linkable repositories method over gRPC. + + FetchLinkableRepositories get repositories from SCM + that are accessible and could be added to the + connection. + + Returns: + Callable[[~.FetchLinkableRepositoriesRequest], + ~.FetchLinkableRepositoriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_linkable_repositories' not in self._stubs: + self._stubs['fetch_linkable_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchLinkableRepositories', + request_serializer=repositories.FetchLinkableRepositoriesRequest.serialize, + response_deserializer=repositories.FetchLinkableRepositoriesResponse.deserialize, + ) + return self._stubs['fetch_linkable_repositories'] + + @property + def fetch_git_refs(self) -> Callable[ + [repositories.FetchGitRefsRequest], + repositories.FetchGitRefsResponse]: + r"""Return a callable for the fetch git refs method over gRPC. + + Fetch the list of branches or tags for a given + repository. 
+ + Returns: + Callable[[~.FetchGitRefsRequest], + ~.FetchGitRefsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_git_refs' not in self._stubs: + self._stubs['fetch_git_refs'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchGitRefs', + request_serializer=repositories.FetchGitRefsRequest.serialize, + response_deserializer=repositories.FetchGitRefsResponse.deserialize, + ) + return self._stubs['fetch_git_refs'] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'RepositoryManagerGrpcTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py new file mode 100644 index 00000000..55d562a9 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py @@ -0,0 +1,742 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO +from .grpc import RepositoryManagerGrpcTransport + + +class RepositoryManagerGrpcAsyncIOTransport(RepositoryManagerTransport): + """gRPC AsyncIO backend transport for RepositoryManager. + + Manages connections to source code repositories. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_connection(self) -> Callable[ + [repositories.CreateConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create connection method over gRPC. + + Creates a Connection. + + Returns: + Callable[[~.CreateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_connection' not in self._stubs: + self._stubs['create_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/CreateConnection', + request_serializer=repositories.CreateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_connection'] + + @property + def get_connection(self) -> Callable[ + [repositories.GetConnectionRequest], + Awaitable[repositories.Connection]]: + r"""Return a callable for the get connection method over gRPC. + + Gets details of a single connection. + + Returns: + Callable[[~.GetConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_connection' not in self._stubs: + self._stubs['get_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/GetConnection', + request_serializer=repositories.GetConnectionRequest.serialize, + response_deserializer=repositories.Connection.deserialize, + ) + return self._stubs['get_connection'] + + @property + def list_connections(self) -> Callable[ + [repositories.ListConnectionsRequest], + Awaitable[repositories.ListConnectionsResponse]]: + r"""Return a callable for the list connections method over gRPC. + + Lists Connections in a given project and location. + + Returns: + Callable[[~.ListConnectionsRequest], + Awaitable[~.ListConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_connections' not in self._stubs: + self._stubs['list_connections'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/ListConnections', + request_serializer=repositories.ListConnectionsRequest.serialize, + response_deserializer=repositories.ListConnectionsResponse.deserialize, + ) + return self._stubs['list_connections'] + + @property + def update_connection(self) -> Callable[ + [repositories.UpdateConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update connection method over gRPC. + + Updates a single connection. + + Returns: + Callable[[~.UpdateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_connection' not in self._stubs: + self._stubs['update_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/UpdateConnection', + request_serializer=repositories.UpdateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_connection'] + + @property + def delete_connection(self) -> Callable[ + [repositories.DeleteConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete connection method over gRPC. + + Deletes a single connection. + + Returns: + Callable[[~.DeleteConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_connection' not in self._stubs: + self._stubs['delete_connection'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteConnection', + request_serializer=repositories.DeleteConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_connection'] + + @property + def create_repository(self) -> Callable[ + [repositories.CreateRepositoryRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create repository method over gRPC. + + Creates a Repository. + + Returns: + Callable[[~.CreateRepositoryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_repository' not in self._stubs: + self._stubs['create_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/CreateRepository', + request_serializer=repositories.CreateRepositoryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_repository'] + + @property + def batch_create_repositories(self) -> Callable[ + [repositories.BatchCreateRepositoriesRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the batch create repositories method over gRPC. + + Creates multiple repositories inside a connection. + + Returns: + Callable[[~.BatchCreateRepositoriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_create_repositories' not in self._stubs: + self._stubs['batch_create_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/BatchCreateRepositories', + request_serializer=repositories.BatchCreateRepositoriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['batch_create_repositories'] + + @property + def get_repository(self) -> Callable[ + [repositories.GetRepositoryRequest], + Awaitable[repositories.Repository]]: + r"""Return a callable for the get repository method over gRPC. + + Gets details of a single repository. + + Returns: + Callable[[~.GetRepositoryRequest], + Awaitable[~.Repository]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_repository' not in self._stubs: + self._stubs['get_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/GetRepository', + request_serializer=repositories.GetRepositoryRequest.serialize, + response_deserializer=repositories.Repository.deserialize, + ) + return self._stubs['get_repository'] + + @property + def list_repositories(self) -> Callable[ + [repositories.ListRepositoriesRequest], + Awaitable[repositories.ListRepositoriesResponse]]: + r"""Return a callable for the list repositories method over gRPC. + + Lists Repositories in a given connection. + + Returns: + Callable[[~.ListRepositoriesRequest], + Awaitable[~.ListRepositoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_repositories' not in self._stubs: + self._stubs['list_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/ListRepositories', + request_serializer=repositories.ListRepositoriesRequest.serialize, + response_deserializer=repositories.ListRepositoriesResponse.deserialize, + ) + return self._stubs['list_repositories'] + + @property + def delete_repository(self) -> Callable[ + [repositories.DeleteRepositoryRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete repository method over gRPC. + + Deletes a single repository. + + Returns: + Callable[[~.DeleteRepositoryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_repository' not in self._stubs: + self._stubs['delete_repository'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteRepository', + request_serializer=repositories.DeleteRepositoryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_repository'] + + @property + def fetch_read_write_token(self) -> Callable[ + [repositories.FetchReadWriteTokenRequest], + Awaitable[repositories.FetchReadWriteTokenResponse]]: + r"""Return a callable for the fetch read write token method over gRPC. + + Fetches read/write token of a given repository. + + Returns: + Callable[[~.FetchReadWriteTokenRequest], + Awaitable[~.FetchReadWriteTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_read_write_token' not in self._stubs: + self._stubs['fetch_read_write_token'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadWriteToken', + request_serializer=repositories.FetchReadWriteTokenRequest.serialize, + response_deserializer=repositories.FetchReadWriteTokenResponse.deserialize, + ) + return self._stubs['fetch_read_write_token'] + + @property + def fetch_read_token(self) -> Callable[ + [repositories.FetchReadTokenRequest], + Awaitable[repositories.FetchReadTokenResponse]]: + r"""Return a callable for the fetch read token method over gRPC. + + Fetches read token of a given repository. + + Returns: + Callable[[~.FetchReadTokenRequest], + Awaitable[~.FetchReadTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_read_token' not in self._stubs: + self._stubs['fetch_read_token'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadToken', + request_serializer=repositories.FetchReadTokenRequest.serialize, + response_deserializer=repositories.FetchReadTokenResponse.deserialize, + ) + return self._stubs['fetch_read_token'] + + @property + def fetch_linkable_repositories(self) -> Callable[ + [repositories.FetchLinkableRepositoriesRequest], + Awaitable[repositories.FetchLinkableRepositoriesResponse]]: + r"""Return a callable for the fetch linkable repositories method over gRPC. + + FetchLinkableRepositories get repositories from SCM + that are accessible and could be added to the + connection. + + Returns: + Callable[[~.FetchLinkableRepositoriesRequest], + Awaitable[~.FetchLinkableRepositoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_linkable_repositories' not in self._stubs: + self._stubs['fetch_linkable_repositories'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchLinkableRepositories', + request_serializer=repositories.FetchLinkableRepositoriesRequest.serialize, + response_deserializer=repositories.FetchLinkableRepositoriesResponse.deserialize, + ) + return self._stubs['fetch_linkable_repositories'] + + @property + def fetch_git_refs(self) -> Callable[ + [repositories.FetchGitRefsRequest], + Awaitable[repositories.FetchGitRefsResponse]]: + r"""Return a callable for the fetch git refs method over gRPC. + + Fetch the list of branches or tags for a given + repository. 
+ + Returns: + Callable[[~.FetchGitRefsRequest], + Awaitable[~.FetchGitRefsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'fetch_git_refs' not in self._stubs: + self._stubs['fetch_git_refs'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v2.RepositoryManager/FetchGitRefs', + request_serializer=repositories.FetchGitRefsRequest.serialize, + response_deserializer=repositories.FetchGitRefsResponse.deserialize, + ) + return self._stubs['fetch_git_refs'] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ( + 'RepositoryManagerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py new file mode 100644 index 00000000..365c9861 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py @@ -0,0 +1,2275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.longrunning import operations_pb2 # type: ignore + +from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RepositoryManagerRestInterceptor: + """Interceptor for RepositoryManager. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RepositoryManagerRestTransport. + + .. code-block:: python + class MyCustomRepositoryManagerInterceptor(RepositoryManagerRestInterceptor): + def pre_batch_create_repositories(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_repositories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_repository(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_repository(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_repository(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_repository(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_git_refs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_git_refs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_linkable_repositories(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_fetch_linkable_repositories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_read_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_read_token(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_read_write_token(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_read_write_token(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_repository(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_repository(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_repositories(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_repositories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_connection(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RepositoryManagerRestTransport(interceptor=MyCustomRepositoryManagerInterceptor()) + client = RepositoryManagerClient(transport=transport) + + + 
""" + def pre_batch_create_repositories(self, request: repositories.BatchCreateRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.BatchCreateRepositoriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_create_repositories + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_batch_create_repositories(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_create_repositories + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_create_connection(self, request: repositories.CreateConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.CreateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_create_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_connection + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_create_repository(self, request: repositories.CreateRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.CreateRepositoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_repository + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. 
+ """ + return request, metadata + + def post_create_repository(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_repository + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_delete_connection(self, request: repositories.DeleteConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.DeleteConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_delete_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_connection + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_delete_repository(self, request: repositories.DeleteRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.DeleteRepositoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_repository + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_delete_repository(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_repository + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. 
+ """ + return response + def pre_fetch_git_refs(self, request: repositories.FetchGitRefsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchGitRefsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_git_refs + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_fetch_git_refs(self, response: repositories.FetchGitRefsResponse) -> repositories.FetchGitRefsResponse: + """Post-rpc interceptor for fetch_git_refs + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_fetch_linkable_repositories(self, request: repositories.FetchLinkableRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchLinkableRepositoriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_linkable_repositories + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_fetch_linkable_repositories(self, response: repositories.FetchLinkableRepositoriesResponse) -> repositories.FetchLinkableRepositoriesResponse: + """Post-rpc interceptor for fetch_linkable_repositories + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_fetch_read_token(self, request: repositories.FetchReadTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchReadTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_read_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. 
+ """ + return request, metadata + + def post_fetch_read_token(self, response: repositories.FetchReadTokenResponse) -> repositories.FetchReadTokenResponse: + """Post-rpc interceptor for fetch_read_token + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_fetch_read_write_token(self, request: repositories.FetchReadWriteTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchReadWriteTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_read_write_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_fetch_read_write_token(self, response: repositories.FetchReadWriteTokenResponse) -> repositories.FetchReadWriteTokenResponse: + """Post-rpc interceptor for fetch_read_write_token + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_get_connection(self, request: repositories.GetConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.GetConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_get_connection(self, response: repositories.Connection) -> repositories.Connection: + """Post-rpc interceptor for get_connection + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. 
+ """ + return response + def pre_get_repository(self, request: repositories.GetRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.GetRepositoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_repository + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_get_repository(self, response: repositories.Repository) -> repositories.Repository: + """Post-rpc interceptor for get_repository + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_list_connections(self, request: repositories.ListConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.ListConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_list_connections(self, response: repositories.ListConnectionsResponse) -> repositories.ListConnectionsResponse: + """Post-rpc interceptor for list_connections + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_list_repositories(self, request: repositories.ListRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.ListRepositoriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_repositories + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. 
+ """ + return request, metadata + + def post_list_repositories(self, response: repositories.ListRepositoriesResponse) -> repositories.ListRepositoriesResponse: + """Post-rpc interceptor for list_repositories + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_update_connection(self, request: repositories.UpdateConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.UpdateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_update_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_connection + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_get_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy( + self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_set_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions( + self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. 
+ """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RepositoryManager server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the RepositoryManager server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RepositoryManagerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RepositoryManagerRestInterceptor + + +class RepositoryManagerRestTransport(RepositoryManagerTransport): + """REST backend transport for RepositoryManager. + + Manages connections to source code repositories. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RepositoryManagerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST)
+ self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or RepositoryManagerRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def operations_client(self) -> operations_v1.AbstractOperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Only create a new client if we do not already have one.
+ if self._operations_client is None:
+ http_options: Dict[str, List[Dict[str, str]]] = {
+ 'google.longrunning.Operations.CancelOperation': [
+ {
+ 'method': 'post',
+ 'uri': '/v2/{name=projects/*/locations/*/operations/*}:cancel',
+ 'body': '*',
+ },
+ ],
+ 'google.longrunning.Operations.GetOperation': [
+ {
+ 'method': 'get',
+ 'uri': '/v2/{name=projects/*/locations/*/operations/*}',
+ },
+ ],
+ }
+
+ rest_transport = operations_v1.OperationsRestTransport(
+ host=self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ scopes=self._scopes,
+ http_options=http_options,
+ path_prefix="v2")
+
+ self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport)
+
+ # Return the client from cache.
+ return self._operations_client
+
+ class _BatchCreateRepositories(RepositoryManagerRestStub):
+ def __hash__(self):
+ return hash("BatchCreateRepositories")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+ def __call__(self,
+ request: repositories.BatchCreateRepositoriesRequest, *,
+ retry: OptionalRetry=gapic_v1.method.DEFAULT,
+ timeout: Optional[float]=None,
+ metadata: Sequence[Tuple[str, str]]=(),
+ ) -> operations_pb2.Operation:
+ r"""Call the batch create repositories method over HTTP.
+
+ Args:
+ request (~.repositories.BatchCreateRepositoriesRequest):
+ The request object. Message for creating repositories
+ in batch.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+ This resource represents a
+ long-running operation that is the
+ result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories:batchCreate', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_batch_create_repositories(request, metadata) + pb_request = repositories.BatchCreateRepositoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_repositories(resp) + return resp + + class _CreateConnection(RepositoryManagerRestStub): + def __hash__(self): + return hash("CreateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "connectionId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.CreateConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create connection method over HTTP. + + Args: + request (~.repositories.CreateConnectionRequest): + The request object. Message for creating a Connection + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/connections', + 'body': 'connection', + }, + ] + request, metadata = self._interceptor.pre_create_connection(request, metadata) + pb_request = repositories.CreateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_connection(resp) + return resp + + class _CreateRepository(RepositoryManagerRestStub): + def __hash__(self): + return hash("CreateRepository") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "repositoryId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.CreateRepositoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create repository method over HTTP. + + Args: + request (~.repositories.CreateRepositoryRequest): + The request object. Message for creating a Repository. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories', + 'body': 'repository', + }, + ] + request, metadata = self._interceptor.pre_create_repository(request, metadata) + pb_request = repositories.CreateRepositoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_repository(resp) + return resp + + class _DeleteConnection(RepositoryManagerRestStub): + def __hash__(self): + return hash("DeleteConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.DeleteConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete connection method over HTTP. + + Args: + request (~.repositories.DeleteConnectionRequest): + The request object. Message for deleting a Connection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/connections/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_connection(request, metadata) + pb_request = repositories.DeleteConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_connection(resp) + return resp + + class _DeleteRepository(RepositoryManagerRestStub): + def __hash__(self): + return hash("DeleteRepository") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.DeleteRepositoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete repository method over HTTP. + + Args: + request (~.repositories.DeleteRepositoryRequest): + The request object. Message for deleting a Repository. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/connections/*/repositories/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_repository(request, metadata) + pb_request = repositories.DeleteRepositoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_repository(resp) + return resp + + class _FetchGitRefs(RepositoryManagerRestStub): + def __hash__(self): + return hash("FetchGitRefs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.FetchGitRefsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.FetchGitRefsResponse: + r"""Call the fetch git refs method over HTTP. + + Args: + request (~.repositories.FetchGitRefsRequest): + The request object. Request for fetching git refs + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.repositories.FetchGitRefsResponse: + Response for fetching git refs + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:fetchGitRefs', + }, + ] + request, metadata = self._interceptor.pre_fetch_git_refs(request, metadata) + pb_request = repositories.FetchGitRefsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.FetchGitRefsResponse() + pb_resp = repositories.FetchGitRefsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_git_refs(resp) + return resp + + class _FetchLinkableRepositories(RepositoryManagerRestStub): + def __hash__(self): + return hash("FetchLinkableRepositories") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.FetchLinkableRepositoriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.FetchLinkableRepositoriesResponse: + r"""Call the fetch linkable + repositories method over HTTP. + + Args: + request (~.repositories.FetchLinkableRepositoriesRequest): + The request object. Request message for + FetchLinkableRepositories. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.FetchLinkableRepositoriesResponse: + Response message for + FetchLinkableRepositories. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{connection=projects/*/locations/*/connections/*}:fetchLinkableRepositories', + }, + ] + request, metadata = self._interceptor.pre_fetch_linkable_repositories(request, metadata) + pb_request = repositories.FetchLinkableRepositoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.FetchLinkableRepositoriesResponse() + pb_resp = repositories.FetchLinkableRepositoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_linkable_repositories(resp) + return resp + + class _FetchReadToken(RepositoryManagerRestStub): + def __hash__(self): + return hash("FetchReadToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.FetchReadTokenRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.FetchReadTokenResponse: + r"""Call the fetch read token method over HTTP. + + Args: + request (~.repositories.FetchReadTokenRequest): + The request object. Message for fetching SCM read token. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.FetchReadTokenResponse: + Message for responding to get read + token. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadToken', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_fetch_read_token(request, metadata) + pb_request = repositories.FetchReadTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.FetchReadTokenResponse() + pb_resp = repositories.FetchReadTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_read_token(resp) + return resp + + class _FetchReadWriteToken(RepositoryManagerRestStub): + def __hash__(self): + return hash("FetchReadWriteToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.FetchReadWriteTokenRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.FetchReadWriteTokenResponse: + r"""Call the fetch read write token method over HTTP. + + Args: + request (~.repositories.FetchReadWriteTokenRequest): + The request object. Message for fetching SCM read/write + token. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.FetchReadWriteTokenResponse: + Message for responding to get + read/write token. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadWriteToken', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_fetch_read_write_token(request, metadata) + pb_request = repositories.FetchReadWriteTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.FetchReadWriteTokenResponse() + pb_resp = repositories.FetchReadWriteTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_read_write_token(resp) + return resp + + class _GetConnection(RepositoryManagerRestStub): + def __hash__(self): + return hash("GetConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.GetConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.Connection: + r"""Call the get connection method over HTTP. + + Args: + request (~.repositories.GetConnectionRequest): + The request object. Message for getting the details of a + Connection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.Connection: + A connection to a SCM like GitHub, + GitHub Enterprise, Bitbucket Server or + GitLab. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/connections/*}', + }, + ] + request, metadata = self._interceptor.pre_get_connection(request, metadata) + pb_request = repositories.GetConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.Connection() + pb_resp = repositories.Connection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_connection(resp) + return resp + + class _GetRepository(RepositoryManagerRestStub): + def __hash__(self): + return hash("GetRepository") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.GetRepositoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.Repository: + r"""Call the get repository method over HTTP. + + Args: + request (~.repositories.GetRepositoryRequest): + The request object. Message for getting the details of a + Repository. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.Repository: + A repository associated to a parent + connection. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/connections/*/repositories/*}', + }, + ] + request, metadata = self._interceptor.pre_get_repository(request, metadata) + pb_request = repositories.GetRepositoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.Repository() + pb_resp = repositories.Repository.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_repository(resp) + return resp + + class _ListConnections(RepositoryManagerRestStub): + def __hash__(self): + return hash("ListConnections") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.ListConnectionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.ListConnectionsResponse: + r"""Call the list connections method over HTTP. + + Args: + request (~.repositories.ListConnectionsRequest): + The request object. Message for requesting list of + Connections. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.ListConnectionsResponse: + Message for response to listing + Connections. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/connections', + }, + ] + request, metadata = self._interceptor.pre_list_connections(request, metadata) + pb_request = repositories.ListConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.ListConnectionsResponse() + pb_resp = repositories.ListConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_connections(resp) + return resp + + class _ListRepositories(RepositoryManagerRestStub): + def __hash__(self): + return hash("ListRepositories") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.ListRepositoriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> repositories.ListRepositoriesResponse: + r"""Call the list repositories method over HTTP. + + Args: + request (~.repositories.ListRepositoriesRequest): + The request object. Message for requesting list of + Repositories. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.repositories.ListRepositoriesResponse: + Message for response to listing + Repositories. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories', + }, + ] + request, metadata = self._interceptor.pre_list_repositories(request, metadata) + pb_request = repositories.ListRepositoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = repositories.ListRepositoriesResponse() + pb_resp = repositories.ListRepositoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_repositories(resp) + return resp + + class _UpdateConnection(RepositoryManagerRestStub): + def __hash__(self): + return hash("UpdateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: repositories.UpdateConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update connection method over HTTP. + + Args: + request (~.repositories.UpdateConnectionRequest): + The request object. Message for updating a Connection. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{connection.name=projects/*/locations/*/connections/*}', + 'body': 'connection', + }, + ] + request, metadata = self._interceptor.pre_update_connection(request, metadata) + pb_request = repositories.UpdateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_connection(resp) + return resp + + @property + def batch_create_repositories(self) -> Callable[ + [repositories.BatchCreateRepositoriesRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchCreateRepositories(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_connection(self) -> Callable[ + [repositories.CreateConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_repository(self) -> Callable[ + [repositories.CreateRepositoryRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRepository(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_connection(self) -> Callable[ + [repositories.DeleteConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_repository(self) -> Callable[ + [repositories.DeleteRepositoryRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRepository(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_git_refs(self) -> Callable[ + [repositories.FetchGitRefsRequest], + repositories.FetchGitRefsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._FetchGitRefs(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_linkable_repositories(self) -> Callable[ + [repositories.FetchLinkableRepositoriesRequest], + repositories.FetchLinkableRepositoriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchLinkableRepositories(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_read_token(self) -> Callable[ + [repositories.FetchReadTokenRequest], + repositories.FetchReadTokenResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchReadToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_read_write_token(self) -> Callable[ + [repositories.FetchReadWriteTokenRequest], + repositories.FetchReadWriteTokenResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchReadWriteToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection(self) -> Callable[ + [repositories.GetConnectionRequest], + repositories.Connection]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_repository(self) -> Callable[ + [repositories.GetRepositoryRequest], + repositories.Repository]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetRepository(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_connections(self) -> Callable[ + [repositories.ListConnectionsRequest], + repositories.ListConnectionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_repositories(self) -> Callable[ + [repositories.ListRepositoriesRequest], + repositories.ListRepositoriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRepositories(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_connection(self) -> Callable[ + [repositories.UpdateConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(RepositoryManagerRestStub): + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:getIamPolicy', + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(RepositoryManagerRestStub): + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:setIamPolicy', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(RepositoryManagerRestStub): + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:testIamPermissions', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(RepositoryManagerRestStub): + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(RepositoryManagerRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RepositoryManagerRestTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py new file mode 100644 index 00000000..1df6a863 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cloudbuild import ( + OperationMetadata, + RunWorkflowCustomOperationMetadata, +) +from .repositories import ( + BatchCreateRepositoriesRequest, + BatchCreateRepositoriesResponse, + Connection, + CreateConnectionRequest, + CreateRepositoryRequest, + DeleteConnectionRequest, + DeleteRepositoryRequest, + FetchGitRefsRequest, + FetchGitRefsResponse, + FetchLinkableRepositoriesRequest, + FetchLinkableRepositoriesResponse, + FetchReadTokenRequest, + FetchReadTokenResponse, + FetchReadWriteTokenRequest, + FetchReadWriteTokenResponse, + GetConnectionRequest, + GetRepositoryRequest, + GitHubConfig, + GitHubEnterpriseConfig, + GitLabConfig, + InstallationState, + ListConnectionsRequest, + ListConnectionsResponse, + ListRepositoriesRequest, + ListRepositoriesResponse, + OAuthCredential, + ProcessWebhookRequest, + Repository, + ServiceDirectoryConfig, + UpdateConnectionRequest, + UserCredential, +) + +__all__ = ( + 'OperationMetadata', + 'RunWorkflowCustomOperationMetadata', + 'BatchCreateRepositoriesRequest', + 'BatchCreateRepositoriesResponse', + 'Connection', + 'CreateConnectionRequest', + 'CreateRepositoryRequest', + 'DeleteConnectionRequest', + 'DeleteRepositoryRequest', + 'FetchGitRefsRequest', + 'FetchGitRefsResponse', + 'FetchLinkableRepositoriesRequest', + 'FetchLinkableRepositoriesResponse', + 'FetchReadTokenRequest', + 'FetchReadTokenResponse', + 'FetchReadWriteTokenRequest', + 'FetchReadWriteTokenResponse', + 'GetConnectionRequest', + 'GetRepositoryRequest', + 'GitHubConfig', + 'GitHubEnterpriseConfig', + 'GitLabConfig', + 'InstallationState', + 'ListConnectionsRequest', + 'ListConnectionsResponse', + 'ListRepositoriesRequest', + 'ListRepositoriesResponse', + 'OAuthCredential', + 'ProcessWebhookRequest', + 'Repository', + 'ServiceDirectoryConfig', + 'UpdateConnectionRequest', + 'UserCredential', +) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py 
b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py new file mode 100644 index 00000000..a016f0af --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.devtools.cloudbuild.v2', + manifest={ + 'OperationMetadata', + 'RunWorkflowCustomOperationMetadata', + }, +) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. 
Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class RunWorkflowCustomOperationMetadata(proto.Message): + r"""Represents the custom metadata of the RunWorkflow + long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + verb (str): + Output only. Name of the verb executed by the + operation. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + target (str): + Output only. Server-defined resource path for + the target of the operation. + pipeline_run_id (str): + Output only. ID of the pipeline run created + by RunWorkflow. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + verb: str = proto.Field( + proto.STRING, + number=3, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=4, + ) + api_version: str = proto.Field( + proto.STRING, + number=5, + ) + target: str = proto.Field( + proto.STRING, + number=6, + ) + pipeline_run_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py new file mode 100644 index 00000000..6d5e147a --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py @@ -0,0 +1,1104 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.devtools.cloudbuild.v2', + manifest={ + 'Connection', + 'InstallationState', + 'FetchLinkableRepositoriesRequest', + 'FetchLinkableRepositoriesResponse', + 'GitHubConfig', + 'GitHubEnterpriseConfig', + 'GitLabConfig', + 'ServiceDirectoryConfig', + 'Repository', + 'OAuthCredential', + 'UserCredential', + 'CreateConnectionRequest', + 'GetConnectionRequest', + 'ListConnectionsRequest', + 'ListConnectionsResponse', + 'UpdateConnectionRequest', + 'DeleteConnectionRequest', + 'CreateRepositoryRequest', + 'BatchCreateRepositoriesRequest', + 'BatchCreateRepositoriesResponse', + 'GetRepositoryRequest', + 'ListRepositoriesRequest', + 'ListRepositoriesResponse', + 'DeleteRepositoryRequest', + 'FetchReadWriteTokenRequest', + 'FetchReadTokenRequest', + 'FetchReadTokenResponse', + 'FetchReadWriteTokenResponse', + 'ProcessWebhookRequest', + 'FetchGitRefsRequest', + 'FetchGitRefsResponse', + }, +) + + +class Connection(proto.Message): + r"""A connection to a SCM like GitHub, GitHub Enterprise, + Bitbucket Server or GitLab. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Immutable. The resource name of the connection, in the + format + ``projects/{project}/locations/{location}/connections/{connection_id}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
Server assigned timestamp for + when the connection was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Server assigned timestamp for + when the connection was updated. + github_config (google.cloud.devtools.cloudbuild_v2.types.GitHubConfig): + Configuration for connections to github.com. + + This field is a member of `oneof`_ ``connection_config``. + github_enterprise_config (google.cloud.devtools.cloudbuild_v2.types.GitHubEnterpriseConfig): + Configuration for connections to an instance + of GitHub Enterprise. + + This field is a member of `oneof`_ ``connection_config``. + gitlab_config (google.cloud.devtools.cloudbuild_v2.types.GitLabConfig): + Configuration for connections to gitlab.com + or an instance of GitLab Enterprise. + + This field is a member of `oneof`_ ``connection_config``. + installation_state (google.cloud.devtools.cloudbuild_v2.types.InstallationState): + Output only. Installation state of the + Connection. + disabled (bool): + If disabled is set to true, functionality is + disabled for this connection. Repository based + API methods and webhooks processing for + repositories in this connection will be + disabled. + reconciling (bool): + Output only. Set to true when the connection + is being set up or updated in the background. + annotations (MutableMapping[str, str]): + Allows clients to store small amounts of + arbitrary data. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + github_config: 'GitHubConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='connection_config', + message='GitHubConfig', + ) + github_enterprise_config: 'GitHubEnterpriseConfig' = proto.Field( + proto.MESSAGE, + number=6, + oneof='connection_config', + message='GitHubEnterpriseConfig', + ) + gitlab_config: 'GitLabConfig' = proto.Field( + proto.MESSAGE, + number=7, + oneof='connection_config', + message='GitLabConfig', + ) + installation_state: 'InstallationState' = proto.Field( + proto.MESSAGE, + number=12, + message='InstallationState', + ) + disabled: bool = proto.Field( + proto.BOOL, + number=13, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=14, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=15, + ) + etag: str = proto.Field( + proto.STRING, + number=16, + ) + + +class InstallationState(proto.Message): + r"""Describes stage and necessary actions to be taken by the + user to complete the installation. Used for GitHub and GitHub + Enterprise based connections. + + Attributes: + stage (google.cloud.devtools.cloudbuild_v2.types.InstallationState.Stage): + Output only. Current step of the installation + process. + message (str): + Output only. Message of what the user should + do next to continue the installation. Empty + string if the installation is already complete. + action_uri (str): + Output only. Link to follow for next action. + Empty string if the installation is already + complete. + """ + class Stage(proto.Enum): + r"""Stage of the installation process. + + Values: + STAGE_UNSPECIFIED (0): + No stage specified. + PENDING_CREATE_APP (1): + Only for GitHub Enterprise. 
An App creation + has been requested. The user needs to confirm + the creation in their GitHub enterprise host. + PENDING_USER_OAUTH (2): + User needs to authorize the GitHub (or + Enterprise) App via OAuth. + PENDING_INSTALL_APP (3): + User needs to follow the link to install the + GitHub (or Enterprise) App. + COMPLETE (10): + Installation process has been completed. + """ + STAGE_UNSPECIFIED = 0 + PENDING_CREATE_APP = 1 + PENDING_USER_OAUTH = 2 + PENDING_INSTALL_APP = 3 + COMPLETE = 10 + + stage: Stage = proto.Field( + proto.ENUM, + number=1, + enum=Stage, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + action_uri: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchLinkableRepositoriesRequest(proto.Message): + r"""Request message for FetchLinkableRepositories. + + Attributes: + connection (str): + Required. The name of the Connection. Format: + ``projects/*/locations/*/connections/*``. + page_size (int): + Number of results to return in the list. + Default to 20. + page_token (str): + Page start. + """ + + connection: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchLinkableRepositoriesResponse(proto.Message): + r"""Response message for FetchLinkableRepositories. + + Attributes: + repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): + repositories ready to be created. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + repositories: MutableSequence['Repository'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Repository', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GitHubConfig(proto.Message): + r"""Configuration for connections to github.com. 
+ + Attributes: + authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.OAuthCredential): + OAuth credential of the account that + authorized the Cloud Build GitHub App. It is + recommended to use a robot account instead of a + human user account. The OAuth token must be tied + to the Cloud Build GitHub App. + app_installation_id (int): + GitHub App installation id. + """ + + authorizer_credential: 'OAuthCredential' = proto.Field( + proto.MESSAGE, + number=1, + message='OAuthCredential', + ) + app_installation_id: int = proto.Field( + proto.INT64, + number=2, + ) + + +class GitHubEnterpriseConfig(proto.Message): + r"""Configuration for connections to an instance of GitHub + Enterprise. + + Attributes: + host_uri (str): + Required. The URI of the GitHub Enterprise + host this connection is for. + api_key (str): + Required. API Key used for authentication of + webhook events. + app_id (int): + Id of the GitHub App created from the + manifest. + app_slug (str): + The URL-friendly name of the GitHub App. + private_key_secret_version (str): + SecretManager resource containing the private key of the + GitHub App, formatted as + ``projects/*/secrets/*/versions/*``. + webhook_secret_secret_version (str): + SecretManager resource containing the webhook secret of the + GitHub App, formatted as + ``projects/*/secrets/*/versions/*``. + app_installation_id (int): + ID of the installation of the GitHub App. + service_directory_config (google.cloud.devtools.cloudbuild_v2.types.ServiceDirectoryConfig): + Configuration for using Service Directory to + privately connect to a GitHub Enterprise server. + This should only be set if the GitHub Enterprise + server is hosted on-premises and not reachable + by public internet. If this field is left empty, + calls to the GitHub Enterprise server will be + made over the public internet. + ssl_ca (str): + SSL certificate to use for requests to GitHub + Enterprise. + server_version (str): + Output only. 
GitHub Enterprise version installed at the + host_uri. + """ + + host_uri: str = proto.Field( + proto.STRING, + number=1, + ) + api_key: str = proto.Field( + proto.STRING, + number=12, + ) + app_id: int = proto.Field( + proto.INT64, + number=2, + ) + app_slug: str = proto.Field( + proto.STRING, + number=13, + ) + private_key_secret_version: str = proto.Field( + proto.STRING, + number=4, + ) + webhook_secret_secret_version: str = proto.Field( + proto.STRING, + number=5, + ) + app_installation_id: int = proto.Field( + proto.INT64, + number=9, + ) + service_directory_config: 'ServiceDirectoryConfig' = proto.Field( + proto.MESSAGE, + number=10, + message='ServiceDirectoryConfig', + ) + ssl_ca: str = proto.Field( + proto.STRING, + number=11, + ) + server_version: str = proto.Field( + proto.STRING, + number=14, + ) + + +class GitLabConfig(proto.Message): + r"""Configuration for connections to gitlab.com or an instance of + GitLab Enterprise. + + Attributes: + host_uri (str): + The URI of the GitLab Enterprise host this + connection is for. If not specified, the default + value is https://gitlab.com. + webhook_secret_secret_version (str): + Required. Immutable. SecretManager resource containing the + webhook secret of a GitLab Enterprise project, formatted as + ``projects/*/secrets/*/versions/*``. + read_authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.UserCredential): + Required. A GitLab personal access token with the minimum + ``read_api`` scope access. + authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.UserCredential): + Required. A GitLab personal access token with the ``api`` + scope access. + service_directory_config (google.cloud.devtools.cloudbuild_v2.types.ServiceDirectoryConfig): + Configuration for using Service Directory to + privately connect to a GitLab Enterprise server. + This should only be set if the GitLab Enterprise + server is hosted on-premises and not reachable + by public internet. 
If this field is left empty, + calls to the GitLab Enterprise server will be + made over the public internet. + ssl_ca (str): + SSL certificate to use for requests to GitLab + Enterprise. + server_version (str): + Output only. Version of the GitLab Enterprise server running + on the ``host_uri``. + """ + + host_uri: str = proto.Field( + proto.STRING, + number=1, + ) + webhook_secret_secret_version: str = proto.Field( + proto.STRING, + number=2, + ) + read_authorizer_credential: 'UserCredential' = proto.Field( + proto.MESSAGE, + number=3, + message='UserCredential', + ) + authorizer_credential: 'UserCredential' = proto.Field( + proto.MESSAGE, + number=4, + message='UserCredential', + ) + service_directory_config: 'ServiceDirectoryConfig' = proto.Field( + proto.MESSAGE, + number=5, + message='ServiceDirectoryConfig', + ) + ssl_ca: str = proto.Field( + proto.STRING, + number=6, + ) + server_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ServiceDirectoryConfig(proto.Message): + r"""ServiceDirectoryConfig represents Service Directory + configuration for a connection. + + Attributes: + service (str): + Required. The Service Directory service name. + Format: + projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}. + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Repository(proto.Message): + r"""A repository associated to a parent connection. + + Attributes: + name (str): + Immutable. Resource name of the repository, in the format + ``projects/*/locations/*/connections/*/repositories/*``. + remote_uri (str): + Required. Git Clone HTTPS URI. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Server assigned timestamp for + when the connection was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Server assigned timestamp for + when the connection was updated. 
+ annotations (MutableMapping[str, str]): + Allows clients to store small amounts of + arbitrary data. + etag (str): + This checksum is computed by the server based + on the value of other fields, and may be sent on + update and delete requests to ensure the client + has an up-to-date value before proceeding. + webhook_id (str): + Output only. External ID of the webhook + created for the repository. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + remote_uri: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + etag: str = proto.Field( + proto.STRING, + number=7, + ) + webhook_id: str = proto.Field( + proto.STRING, + number=8, + ) + + +class OAuthCredential(proto.Message): + r"""Represents an OAuth token of the account that authorized the + Connection, and associated metadata. + + Attributes: + oauth_token_secret_version (str): + A SecretManager resource containing the OAuth token that + authorizes the Cloud Build connection. Format: + ``projects/*/secrets/*/versions/*``. + username (str): + Output only. The username associated to this + token. + """ + + oauth_token_secret_version: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UserCredential(proto.Message): + r"""Represents a personal access token that authorized the + Connection, and associated metadata. + + Attributes: + user_token_secret_version (str): + Required. A SecretManager resource containing the user token + that authorizes the Cloud Build connection. Format: + ``projects/*/secrets/*/versions/*``. + username (str): + Output only. 
The username associated to this + token. + """ + + user_token_secret_version: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateConnectionRequest(proto.Message): + r"""Message for creating a Connection + + Attributes: + parent (str): + Required. Project and location where the connection will be + created. Format: ``projects/*/locations/*``. + connection (google.cloud.devtools.cloudbuild_v2.types.Connection): + Required. The Connection to create. + connection_id (str): + Required. The ID to use for the Connection, which will + become the final component of the Connection's resource + name. Names must be unique per-project per-location. Allows + alphanumeric characters and any of -._~%!$&'()*+,;=@. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + connection: 'Connection' = proto.Field( + proto.MESSAGE, + number=2, + message='Connection', + ) + connection_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetConnectionRequest(proto.Message): + r"""Message for getting the details of a Connection. + + Attributes: + name (str): + Required. The name of the Connection to retrieve. Format: + ``projects/*/locations/*/connections/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListConnectionsRequest(proto.Message): + r"""Message for requesting list of Connections. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + Connections. Format: ``projects/*/locations/*``. + page_size (int): + Number of results to return in the list. + page_token (str): + Page start. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListConnectionsResponse(proto.Message): + r"""Message for response to listing Connections. 
+ + Attributes: + connections (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Connection]): + The list of Connections. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + connections: MutableSequence['Connection'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Connection', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateConnectionRequest(proto.Message): + r"""Message for updating a Connection. + + Attributes: + connection (google.cloud.devtools.cloudbuild_v2.types.Connection): + Required. The Connection to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be updated. + allow_missing (bool): + If set to true, and the connection is not found a new + connection will be created. In this situation + ``update_mask`` is ignored. The creation will succeed only + if the input connection has all the necessary information + (e.g a github_config with both user_oauth_token and + installation_id properties). + etag (str): + The current etag of the connection. + If an etag is provided and does not match the + current etag of the connection, update will be + blocked and an ABORTED error will be returned. + """ + + connection: 'Connection' = proto.Field( + proto.MESSAGE, + number=1, + message='Connection', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteConnectionRequest(proto.Message): + r"""Message for deleting a Connection. + + Attributes: + name (str): + Required. The name of the Connection to delete. Format: + ``projects/*/locations/*/connections/*``. + etag (str): + The current etag of the connection. 
r"""Message for creating repositories in batch.
`AIP-160 <https://google.aip.dev/160>`__. e.g.
+ next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + repositories: MutableSequence['Repository'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Repository', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteRepositoryRequest(proto.Message): + r"""Message for deleting a Repository. + + Attributes: + name (str): + Required. The name of the Repository to delete. Format: + ``projects/*/locations/*/connections/*/repositories/*``. + etag (str): + The current etag of the repository. + If an etag is provided and does not match the + current etag of the repository, deletion will be + blocked and an ABORTED error will be returned. + validate_only (bool): + If set, validate the request, but do not + actually post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class FetchReadWriteTokenRequest(proto.Message): + r"""Message for fetching SCM read/write token. + + Attributes: + repository (str): + Required. The resource name of the repository in the format + ``projects/*/locations/*/connections/*/repositories/*``. + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchReadTokenRequest(proto.Message): + r"""Message for fetching SCM read token. + + Attributes: + repository (str): + Required. The resource name of the repository in the format + ``projects/*/locations/*/connections/*/repositories/*``. + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchReadTokenResponse(proto.Message): + r"""Message for responding to get read token. + + Attributes: + token (str): + The token content. + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Expiration timestamp. Can be empty if unknown + or non-expiring. 
Arbitrary additional key to find the matching
+ """ + REF_TYPE_UNSPECIFIED = 0 + TAG = 1 + BRANCH = 2 + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + ref_type: RefType = proto.Field( + proto.ENUM, + number=2, + enum=RefType, + ) + + +class FetchGitRefsResponse(proto.Message): + r"""Response for fetching git refs + + Attributes: + ref_names (MutableSequence[str]): + Name of the refs fetched. + """ + + ref_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini new file mode 100644 index 00000000..574c5aed --- /dev/null +++ b/owl-bot-staging/v2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py new file mode 100644 index 00000000..89095013 --- /dev/null +++ b/owl-bot-staging/v2/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/devtools/cloudbuild_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py new file mode 100644 index 00000000..fa9a6929 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_batch_create_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + requests = cloudbuild_v2.CreateRepositoryRequest() + requests.parent = "parent_value" + requests.repository.remote_uri = "remote_uri_value" + requests.repository_id = "repository_id_value" + + request = cloudbuild_v2.BatchCreateRepositoriesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_repositories(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py new file mode 100644 index 00000000..cad8baae --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_batch_create_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + requests = cloudbuild_v2.CreateRepositoryRequest() + requests.parent = "parent_value" + requests.repository.remote_uri = "remote_uri_value" + requests.repository_id = "repository_id_value" + + request = cloudbuild_v2.BatchCreateRepositoriesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_repositories(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py new file mode 100644 index 00000000..066f3245 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_CreateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_create_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_CreateConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py new file mode 100644 index 00000000..d393a554 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_create_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.CreateConnectionRequest( + parent="parent_value", + connection_id="connection_id_value", + ) + + # Make the request + operation = client.create_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py new file mode 100644 index 00000000..52aaa857 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_CreateRepository_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_create_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + repository = cloudbuild_v2.Repository() + repository.remote_uri = "remote_uri_value" + + request = cloudbuild_v2.CreateRepositoryRequest( + parent="parent_value", + repository=repository, + repository_id="repository_id_value", + ) + + # Make the request + operation = client.create_repository(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_CreateRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py new file mode 100644 index 00000000..eb9a5e29 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_create_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + repository = cloudbuild_v2.Repository() + repository.remote_uri = "remote_uri_value" + + request = cloudbuild_v2.CreateRepositoryRequest( + parent="parent_value", + repository=repository, + repository_id="repository_id_value", + ) + + # Make the request + operation = client.create_repository(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py new file mode 100644 index 00000000..ef37e513 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_delete_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py new file mode 100644 index 00000000..a18ff650 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_delete_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py new file mode 100644 index 00000000..58a5dac2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_delete_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteRepositoryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_repository(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py new file mode 100644 index 00000000..f141cb54 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_delete_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.DeleteRepositoryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_repository(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py new file mode 100644 index 00000000..2c639684 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchGitRefs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_fetch_git_refs(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchGitRefsRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_git_refs(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py new file mode 100644 index 00000000..fde064f0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchGitRefs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_fetch_git_refs(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchGitRefsRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_git_refs(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py new file mode 100644 index 00000000..c6c744fc --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchLinkableRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_fetch_linkable_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchLinkableRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py new file mode 100644 index 00000000..9d422598 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchLinkableRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_fetch_linkable_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchLinkableRepositoriesRequest( + connection="connection_value", + ) + + # Make the request + page_result = client.fetch_linkable_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py new file mode 100644 index 00000000..b110edb9 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_fetch_read_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadTokenRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_read_token(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py new file mode 100644 index 00000000..08680d32 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_fetch_read_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadTokenRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_read_token(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py new file mode 100644 index 00000000..f2fab11e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadWriteToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_fetch_read_write_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadWriteTokenRequest( + repository="repository_value", + ) + + # Make the request + response = await client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py new file mode 100644 index 00000000..64062425 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchReadWriteToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_fetch_read_write_token(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.FetchReadWriteTokenRequest( + repository="repository_value", + ) + + # Make the request + response = client.fetch_read_write_token(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py new file mode 100644 index 00000000..cbce2c58 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_GetConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_get_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_GetConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py new file mode 100644 index 00000000..7da0f760 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_GetConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_get_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_GetConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py new file mode 100644 index 00000000..077cd120 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_GetRepository_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_get_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetRepositoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_repository(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_GetRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py new file mode 100644 index 00000000..4f0bbd72 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRepository +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_GetRepository_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_get_repository(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.GetRepositoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_repository(request=request) + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_GetRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py new file mode 100644 index 00000000..78e39000 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_ListConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_list_connections(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_ListConnections_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py new file mode 100644 index 00000000..b0b6783d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_ListConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_list_connections(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_ListConnections_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py new file mode 100644 index 00000000..6140bd1a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_ListRepositories_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_list_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListRepositoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_repositories(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_ListRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py new file mode 100644 index 00000000..b133c8eb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRepositories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_list_repositories(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.ListRepositoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_repositories(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py new file mode 100644 index 00000000..792d9cd7 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +async def sample_update_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v2.UpdateConnectionRequest( + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py new file mode 100644 index 00000000..f1583940 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v2 + + +def sample_update_connection(): + # Create a client + client = cloudbuild_v2.RepositoryManagerClient() + + # Initialize request argument(s) + request = cloudbuild_v2.UpdateConnectionRequest( + ) + + # Make the request + operation = client.update_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json new file mode 100644 index 00000000..818d3fc2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -0,0 +1,2309 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.devtools.cloudbuild.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-build", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.batch_create_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.BatchCreateRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "BatchCreateRepositories" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_repositories" + }, + "description": "Sample for BatchCreateRepositories", + "file": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.batch_create_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.BatchCreateRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "BatchCreateRepositories" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_repositories" + }, + "description": "Sample for BatchCreateRepositories", + "file": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.create_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "CreateConnection" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "connection", + "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" + }, + { + "name": "connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_connection" + }, + "description": "Sample for CreateConnection", + "file": "cloudbuild_v2_generated_repository_manager_create_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateConnection_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_create_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.create_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "CreateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest" + }, + { + "name": 
"parent", + "type": "str" + }, + { + "name": "connection", + "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" + }, + { + "name": "connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_connection" + }, + "description": "Sample for CreateConnection", + "file": "cloudbuild_v2_generated_repository_manager_create_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_create_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.create_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "CreateRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "repository", + "type": 
"google.cloud.devtools.cloudbuild_v2.types.Repository" + }, + { + "name": "repository_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_repository" + }, + "description": "Sample for CreateRepository", + "file": "cloudbuild_v2_generated_repository_manager_create_repository_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateRepository_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_create_repository_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.create_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "CreateRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "repository", + "type": "google.cloud.devtools.cloudbuild_v2.types.Repository" + }, + { + "name": "repository_id", 
+ "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_repository" + }, + "description": "Sample for CreateRepository", + "file": "cloudbuild_v2_generated_repository_manager_create_repository_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_create_repository_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.delete_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "DeleteConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "delete_connection" + }, + "description": "Sample for DeleteConnection", + "file": "cloudbuild_v2_generated_repository_manager_delete_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_delete_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.delete_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "DeleteConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_connection" + }, + "description": "Sample for DeleteConnection", + "file": "cloudbuild_v2_generated_repository_manager_delete_connection_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_delete_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.delete_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "DeleteRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_repository" + }, + "description": "Sample for DeleteRepository", + "file": "cloudbuild_v2_generated_repository_manager_delete_repository_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, 
+ "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_delete_repository_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.delete_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "DeleteRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_repository" + }, + "description": "Sample for DeleteRepository", + "file": "cloudbuild_v2_generated_repository_manager_delete_repository_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + 
"start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_delete_repository_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_git_refs", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchGitRefs", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchGitRefs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse", + "shortName": "fetch_git_refs" + }, + "description": "Sample for FetchGitRefs", + "file": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_git_refs", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchGitRefs", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchGitRefs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse", + "shortName": "fetch_git_refs" + }, + "description": "Sample for FetchGitRefs", + "file": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_linkable_repositories", + 
"method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchLinkableRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchLinkableRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesAsyncPager", + "shortName": "fetch_linkable_repositories" + }, + "description": "Sample for FetchLinkableRepositories", + "file": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_linkable_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchLinkableRepositories", + "service": { + "fullName": 
"google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchLinkableRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesPager", + "shortName": "fetch_linkable_repositories" + }, + "description": "Sample for FetchLinkableRepositories", + "file": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_read_token", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadToken", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchReadToken" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse", + "shortName": "fetch_read_token" + }, + "description": "Sample for FetchReadToken", + "file": "cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_read_token", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadToken", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchReadToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse", + "shortName": "fetch_read_token" + }, + "description": "Sample for FetchReadToken", + "file": "cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_read_write_token", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadWriteToken", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchReadWriteToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse", + "shortName": "fetch_read_write_token" + }, + "description": "Sample for FetchReadWriteToken", + "file": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_read_write_token", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadWriteToken", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "FetchReadWriteToken" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest" + }, + { + "name": "repository", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse", + "shortName": "fetch_read_write_token" + }, + "description": "Sample for FetchReadWriteToken", + "file": 
"cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.get_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "GetConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.Connection", + "shortName": "get_connection" + }, + "description": "Sample for GetConnection", + "file": "cloudbuild_v2_generated_repository_manager_get_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetConnection_async", + 
"segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_get_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.get_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "GetConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.Connection", + "shortName": "get_connection" + }, + "description": "Sample for GetConnection", + "file": "cloudbuild_v2_generated_repository_manager_get_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_get_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.get_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "GetRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.Repository", + "shortName": "get_repository" + }, + "description": "Sample for GetRepository", + "file": "cloudbuild_v2_generated_repository_manager_get_repository_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetRepository_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_get_repository_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.get_repository", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetRepository", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "GetRepository" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.types.Repository", + "shortName": "get_repository" + }, + "description": "Sample for GetRepository", + "file": "cloudbuild_v2_generated_repository_manager_get_repository_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetRepository_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_get_repository_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": 
"google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.list_connections", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListConnections", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "ListConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsAsyncPager", + "shortName": "list_connections" + }, + "description": "Sample for ListConnections", + "file": "cloudbuild_v2_generated_repository_manager_list_connections_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_list_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.list_connections", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListConnections", + "service": { + 
"fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "ListConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsPager", + "shortName": "list_connections" + }, + "description": "Sample for ListConnections", + "file": "cloudbuild_v2_generated_repository_manager_list_connections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_list_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.list_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "ListRepositories" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesAsyncPager", + "shortName": "list_repositories" + }, + "description": "Sample for ListRepositories", + "file": "cloudbuild_v2_generated_repository_manager_list_repositories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListRepositories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_list_repositories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.list_repositories", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListRepositories", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "ListRepositories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesPager", + "shortName": "list_repositories" + }, + "description": "Sample for ListRepositories", + "file": "cloudbuild_v2_generated_repository_manager_list_repositories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_list_repositories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", + "shortName": "RepositoryManagerAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.update_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.UpdateConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "UpdateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest" + }, + { + "name": "connection", + "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_connection" + }, + "description": "Sample for UpdateConnection", + "file": "cloudbuild_v2_generated_repository_manager_update_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_update_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", + "shortName": "RepositoryManagerClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.update_connection", + "method": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.UpdateConnection", + "service": { + "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", + "shortName": "RepositoryManager" + }, + "shortName": "UpdateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest" + }, + { + "name": "connection", + "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_connection" + }, + "description": "Sample for UpdateConnection", + "file": "cloudbuild_v2_generated_repository_manager_update_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v2_generated_repository_manager_update_connection_sync.py" + } + ] +} diff --git a/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py new file mode 100644 index 00000000..6df46861 --- /dev/null +++ b/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py @@ -0,0 +1,189 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class cloudbuildCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_repositories': ('parent', 'requests', ), + 'create_connection': ('parent', 'connection', 'connection_id', ), + 'create_repository': ('parent', 'repository', 'repository_id', ), + 'delete_connection': ('name', 'etag', 'validate_only', ), + 'delete_repository': ('name', 'etag', 'validate_only', ), + 'fetch_git_refs': ('repository', 'ref_type', ), + 'fetch_linkable_repositories': ('connection', 'page_size', 'page_token', ), + 'fetch_read_token': ('repository', ), + 'fetch_read_write_token': ('repository', ), + 'get_connection': ('name', ), + 'get_repository': ('name', ), + 'list_connections': ('parent', 'page_size', 'page_token', ), + 'list_repositories': ('parent', 'page_size', 'page_token', 'filter', ), + 'update_connection': ('connection', 'update_mask', 'allow_missing', 'etag', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=cloudbuildCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the cloudbuild client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py new file mode 100644 index 00000000..525db897 --- /dev/null +++ b/owl-bot-staging/v2/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-build' + + +description = "Google Cloud Build API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/devtools/cloudbuild/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/python-build" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud", "google.cloud.devtools"] + +setuptools.setup( + name=name, + version=version, + description=description, + 
long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt new file mode 100644 index 00000000..2beecf99 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt new file mode 100644 index 00000000..ad3f0fa5 --- /dev/null +++ b/owl-bot-staging/v2/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py new file mode 100644 index 00000000..1b4db446 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py new file mode 100644 index 00000000..786103fe --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py @@ -0,0 +1,9596 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import 
operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import RepositoryManagerAsyncClient +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import RepositoryManagerClient +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers +from google.cloud.devtools.cloudbuild_v2.services.repository_manager import transports +from google.cloud.devtools.cloudbuild_v2.types import cloudbuild +from google.cloud.devtools.cloudbuild_v2.types import repositories +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RepositoryManagerClient._get_default_mtls_endpoint(None) is None + assert RepositoryManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RepositoryManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RepositoryManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RepositoryManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RepositoryManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RepositoryManagerClient, "grpc"), + (RepositoryManagerAsyncClient, "grpc_asyncio"), + (RepositoryManagerClient, "rest"), +]) +def test_repository_manager_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudbuild.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RepositoryManagerGrpcTransport, "grpc"), + 
(transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RepositoryManagerRestTransport, "rest"), +]) +def test_repository_manager_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RepositoryManagerClient, "grpc"), + (RepositoryManagerAsyncClient, "grpc_asyncio"), + (RepositoryManagerClient, "rest"), +]) +def test_repository_manager_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudbuild.googleapis.com' + ) + + +def test_repository_manager_client_get_transport_class(): + transport = RepositoryManagerClient.get_transport_class() + available_transports = [ + transports.RepositoryManagerGrpcTransport, + 
transports.RepositoryManagerRestTransport, + ] + assert transport in available_transports + + transport = RepositoryManagerClient.get_transport_class("grpc") + assert transport == transports.RepositoryManagerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), +]) +@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) +@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) +def test_repository_manager_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RepositoryManagerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RepositoryManagerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", "true"), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (RepositoryManagerClient, 
transports.RepositoryManagerGrpcTransport, "grpc", "false"), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", "true"), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", "false"), +]) +@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) +@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_repository_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RepositoryManagerClient, RepositoryManagerAsyncClient +]) +@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) +@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) +def test_repository_manager_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), +]) +def test_repository_manager_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", grpc_helpers), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", None), +]) +def test_repository_manager_client_client_options_credentials_file(client_class, 
transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_repository_manager_client_client_options_from_dict(): + with mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = RepositoryManagerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", grpc_helpers), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_repository_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudbuild.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="cloudbuild.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.CreateConnectionRequest, + dict, +]) +def test_create_connection(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + client.create_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateConnectionRequest() + +@pytest.mark.asyncio +async def test_create_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.CreateConnectionRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_connection_async_from_dict(): + await test_create_connection_async(request_type=dict) + + +def test_create_connection_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.CreateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_connection_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = repositories.CreateConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_connection_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_connection( + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].connection + mock_val = repositories.Connection(name='name_value') + assert arg == mock_val + arg = args[0].connection_id + mock_val = 'connection_id_value' + assert arg == mock_val + + +def test_create_connection_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_connection( + repositories.CreateConnectionRequest(), + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + +@pytest.mark.asyncio +async def test_create_connection_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_connection( + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].connection + mock_val = repositories.Connection(name='name_value') + assert arg == mock_val + arg = args[0].connection_id + mock_val = 'connection_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_connection_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_connection( + repositories.CreateConnectionRequest(), + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.GetConnectionRequest, + dict, +]) +def test_get_connection(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.Connection( + name='name_value', + disabled=True, + reconciling=True, + etag='etag_value', + ) + response = client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.Connection) + assert response.name == 'name_value' + assert response.disabled is True + assert response.reconciling is True + assert response.etag == 'etag_value' + + +def test_get_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + client.get_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetConnectionRequest() + +@pytest.mark.asyncio +async def test_get_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.GetConnectionRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection( + name='name_value', + disabled=True, + reconciling=True, + etag='etag_value', + )) + response = await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.Connection) + assert response.name == 'name_value' + assert response.disabled is True + assert response.reconciling is True + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_get_connection_async_from_dict(): + await test_get_connection_async(request_type=dict) + + +def test_get_connection_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.GetConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + call.return_value = repositories.Connection() + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_connection_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.GetConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection()) + await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_connection_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_connection_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection( + repositories.GetConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_connection_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = repositories.Connection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_connection_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_connection( + repositories.GetConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.ListConnectionsRequest, + dict, +]) +def test_list_connections(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListConnectionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + client.list_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListConnectionsRequest() + +@pytest.mark.asyncio +async def test_list_connections_async(transport: str = 'grpc_asyncio', request_type=repositories.ListConnectionsRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_connections_async_from_dict(): + await test_list_connections_async(request_type=dict) + + +def test_list_connections_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.ListConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + call.return_value = repositories.ListConnectionsResponse() + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_connections_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.ListConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse()) + await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_connections_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_connections_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + repositories.ListConnectionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_connections_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_connections_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_connections( + repositories.ListConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_connections_pager(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_connections(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Connection) + for i in results) +def test_list_connections_pages(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_connections(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_connections_async_pager(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_connections(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, repositories.Connection) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_connections_async_pages(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + repositories.UpdateConnectionRequest, + dict, +]) +def test_update_connection(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.UpdateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + client.update_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.UpdateConnectionRequest() + +@pytest.mark.asyncio +async def test_update_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.UpdateConnectionRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.UpdateConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_connection_async_from_dict(): + await test_update_connection_async(request_type=dict) + + +def test_update_connection_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.UpdateConnectionRequest() + + request.connection.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_connection_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.UpdateConnectionRequest() + + request.connection.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection.name=name_value', + ) in kw['metadata'] + + +def test_update_connection_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_connection( + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = repositories.Connection(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_connection_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_connection( + repositories.UpdateConnectionRequest(), + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_connection_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_connection( + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].connection + mock_val = repositories.Connection(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_connection_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_connection( + repositories.UpdateConnectionRequest(), + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.DeleteConnectionRequest, + dict, +]) +def test_delete_connection(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + client.delete_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteConnectionRequest() + +@pytest.mark.asyncio +async def test_delete_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.DeleteConnectionRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_connection_async_from_dict(): + await test_delete_connection_async(request_type=dict) + + +def test_delete_connection_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.DeleteConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_connection_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.DeleteConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_connection_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_connection_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_connection( + repositories.DeleteConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_connection_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_connection_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_connection( + repositories.DeleteConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.CreateRepositoryRequest, + dict, +]) +def test_create_repository(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateRepositoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_repository_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + client.create_repository() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateRepositoryRequest() + +@pytest.mark.asyncio +async def test_create_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.CreateRepositoryRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.CreateRepositoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_repository_async_from_dict(): + await test_create_repository_async(request_type=dict) + + +def test_create_repository_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = repositories.CreateRepositoryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_repository_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.CreateRepositoryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_repository_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_repository( + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].repository + mock_val = repositories.Repository(name='name_value') + assert arg == mock_val + arg = args[0].repository_id + mock_val = 'repository_id_value' + assert arg == mock_val + + +def test_create_repository_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_repository( + repositories.CreateRepositoryRequest(), + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + +@pytest.mark.asyncio +async def test_create_repository_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_repository), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_repository( + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].repository + mock_val = repositories.Repository(name='name_value') + assert arg == mock_val + arg = args[0].repository_id + mock_val = 'repository_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_repository_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_repository( + repositories.CreateRepositoryRequest(), + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.BatchCreateRepositoriesRequest, + dict, +]) +def test_batch_create_repositories(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.batch_create_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.BatchCreateRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_create_repositories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + client.batch_create_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.BatchCreateRepositoriesRequest() + +@pytest.mark.asyncio +async def test_batch_create_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.BatchCreateRepositoriesRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.batch_create_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.BatchCreateRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_repositories_async_from_dict(): + await test_batch_create_repositories_async(request_type=dict) + + +def test_batch_create_repositories_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.BatchCreateRepositoriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.batch_create_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_create_repositories_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = repositories.BatchCreateRepositoriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.batch_create_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_batch_create_repositories_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_create_repositories( + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].requests + mock_val = [repositories.CreateRepositoryRequest(parent='parent_value')] + assert arg == mock_val + + +def test_batch_create_repositories_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_create_repositories( + repositories.BatchCreateRepositoriesRequest(), + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + +@pytest.mark.asyncio +async def test_batch_create_repositories_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_create_repositories( + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].requests + mock_val = [repositories.CreateRepositoryRequest(parent='parent_value')] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_batch_create_repositories_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_create_repositories( + repositories.BatchCreateRepositoriesRequest(), + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.GetRepositoryRequest, + dict, +]) +def test_get_repository(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.Repository( + name='name_value', + remote_uri='remote_uri_value', + etag='etag_value', + webhook_id='webhook_id_value', + ) + response = client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetRepositoryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.Repository) + assert response.name == 'name_value' + assert response.remote_uri == 'remote_uri_value' + assert response.etag == 'etag_value' + assert response.webhook_id == 'webhook_id_value' + + +def test_get_repository_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + client.get_repository() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetRepositoryRequest() + +@pytest.mark.asyncio +async def test_get_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.GetRepositoryRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository( + name='name_value', + remote_uri='remote_uri_value', + etag='etag_value', + webhook_id='webhook_id_value', + )) + response = await client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.GetRepositoryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.Repository) + assert response.name == 'name_value' + assert response.remote_uri == 'remote_uri_value' + assert response.etag == 'etag_value' + assert response.webhook_id == 'webhook_id_value' + + +@pytest.mark.asyncio +async def test_get_repository_async_from_dict(): + await test_get_repository_async(request_type=dict) + + +def test_get_repository_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.GetRepositoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + call.return_value = repositories.Repository() + client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_repository_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.GetRepositoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository()) + await client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_repository_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.Repository() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_repository( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_repository_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_repository( + repositories.GetRepositoryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_repository_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_repository), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = repositories.Repository() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_repository( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_repository_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_repository( + repositories.GetRepositoryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.ListRepositoriesRequest, + dict, +]) +def test_list_repositories(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListRepositoriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRepositoriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_repositories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + client.list_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListRepositoriesRequest() + +@pytest.mark.asyncio +async def test_list_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.ListRepositoriesRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.ListRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRepositoriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_repositories_async_from_dict(): + await test_list_repositories_async(request_type=dict) + + +def test_list_repositories_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.ListRepositoriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + call.return_value = repositories.ListRepositoriesResponse() + client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_repositories_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.ListRepositoriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse()) + await client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_repositories_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListRepositoriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_repositories( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_repositories_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_repositories( + repositories.ListRepositoriesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_repositories_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.ListRepositoriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_repositories( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_repositories_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_repositories( + repositories.ListRepositoriesRequest(), + parent='parent_value', + ) + + +def test_list_repositories_pager(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_repositories(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Repository) + for i in results) +def test_list_repositories_pages(transport_name: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + pages = list(client.list_repositories(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_repositories_async_pager(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_repositories(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, repositories.Repository) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_repositories_async_pages(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_repositories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + repositories.DeleteRepositoryRequest, + dict, +]) +def test_delete_repository(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_repository(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteRepositoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_repository_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + client.delete_repository() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteRepositoryRequest() + +@pytest.mark.asyncio +async def test_delete_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.DeleteRepositoryRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.DeleteRepositoryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_repository_async_from_dict(): + await test_delete_repository_async(request_type=dict) + + +def test_delete_repository_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.DeleteRepositoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_repository_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.DeleteRepositoryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_repository(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_repository_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_repository( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_repository_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_repository( + repositories.DeleteRepositoryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_repository_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_repository), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_repository( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_repository_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_repository( + repositories.DeleteRepositoryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchReadWriteTokenRequest, + dict, +]) +def test_fetch_read_write_token(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadWriteTokenResponse( + token='token_value', + ) + response = client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadWriteTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchReadWriteTokenResponse) + assert response.token == 'token_value' + + +def test_fetch_read_write_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + client.fetch_read_write_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadWriteTokenRequest() + +@pytest.mark.asyncio +async def test_fetch_read_write_token_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchReadWriteTokenRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse( + token='token_value', + )) + response = await client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadWriteTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchReadWriteTokenResponse) + assert response.token == 'token_value' + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_async_from_dict(): + await test_fetch_read_write_token_async(request_type=dict) + + +def test_fetch_read_write_token_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchReadWriteTokenRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + call.return_value = repositories.FetchReadWriteTokenResponse() + client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_read_write_token_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchReadWriteTokenRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse()) + await client.fetch_read_write_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +def test_fetch_read_write_token_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadWriteTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_read_write_token( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + + +def test_fetch_read_write_token_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_read_write_token( + repositories.FetchReadWriteTokenRequest(), + repository='repository_value', + ) + +@pytest.mark.asyncio +async def test_fetch_read_write_token_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_write_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadWriteTokenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_read_write_token( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_fetch_read_write_token_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.fetch_read_write_token( + repositories.FetchReadWriteTokenRequest(), + repository='repository_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchReadTokenRequest, + dict, +]) +def test_fetch_read_token(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadTokenResponse( + token='token_value', + ) + response = client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchReadTokenResponse) + assert response.token == 'token_value' + + +def test_fetch_read_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + client.fetch_read_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadTokenRequest() + +@pytest.mark.asyncio +async def test_fetch_read_token_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchReadTokenRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse( + token='token_value', + )) + response = await client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchReadTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchReadTokenResponse) + assert response.token == 'token_value' + + +@pytest.mark.asyncio +async def test_fetch_read_token_async_from_dict(): + await test_fetch_read_token_async(request_type=dict) + + +def test_fetch_read_token_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchReadTokenRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + call.return_value = repositories.FetchReadTokenResponse() + client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_read_token_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchReadTokenRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse()) + await client.fetch_read_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +def test_fetch_read_token_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = repositories.FetchReadTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_read_token( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + + +def test_fetch_read_token_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_read_token( + repositories.FetchReadTokenRequest(), + repository='repository_value', + ) + +@pytest.mark.asyncio +async def test_fetch_read_token_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_read_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchReadTokenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_read_token( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_fetch_read_token_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_read_token( + repositories.FetchReadTokenRequest(), + repository='repository_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchLinkableRepositoriesRequest, + dict, +]) +def test_fetch_linkable_repositories(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchLinkableRepositoriesResponse( + next_page_token='next_page_token_value', + ) + response = client.fetch_linkable_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchLinkableRepositoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchLinkableRepositoriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_fetch_linkable_repositories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + client.fetch_linkable_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchLinkableRepositoriesRequest() + +@pytest.mark.asyncio +async def test_fetch_linkable_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchLinkableRepositoriesRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchLinkableRepositoriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.fetch_linkable_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchLinkableRepositoriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchLinkableRepositoriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_fetch_linkable_repositories_async_from_dict(): + await test_fetch_linkable_repositories_async(request_type=dict) + + +def test_fetch_linkable_repositories_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchLinkableRepositoriesRequest() + + request.connection = 'connection_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_linkable_repositories), + '__call__') as call: + call.return_value = repositories.FetchLinkableRepositoriesResponse() + client.fetch_linkable_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'connection=connection_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_linkable_repositories_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchLinkableRepositoriesRequest() + + request.connection = 'connection_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.fetch_linkable_repositories),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchLinkableRepositoriesResponse())
+        await client.fetch_linkable_repositories(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'connection=connection_value',
+    ) in kw['metadata']
+
+
+def test_fetch_linkable_repositories_pager(transport_name: str = "grpc"):
+    # NOTE(review): credentials must be an *instance*; the generator emitted the
+    # bare AnonymousCredentials class here (missing "()") — fixed for consistency
+    # with every other test in this file.
+    client = RepositoryManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.fetch_linkable_repositories),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                    repositories.Repository(),
+                    repositories.Repository(),
+                ],
+                next_page_token='abc',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[],
+                next_page_token='def',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                ],
+                next_page_token='ghi',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                    repositories.Repository(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('connection', ''),
+            )),
+        )
+        pager = client.fetch_linkable_repositories(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, repositories.Repository)
+                   for i in results)
+def test_fetch_linkable_repositories_pages(transport_name: str = "grpc"):
+    # NOTE(review): same missing-"()" generator artifact fixed here.
+    client = RepositoryManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.fetch_linkable_repositories),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                    repositories.Repository(),
+                    repositories.Repository(),
+                ],
+                next_page_token='abc',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[],
+                next_page_token='def',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                ],
+                next_page_token='ghi',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                    repositories.Repository(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.fetch_linkable_repositories(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_fetch_linkable_repositories_async_pager():
+    # NOTE(review): credentials must be an *instance*; the generator emitted the
+    # bare AnonymousCredentials class here (missing "()") — fixed for consistency
+    # with every other test in this file.
+    client = RepositoryManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.fetch_linkable_repositories),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                    repositories.Repository(),
+                    repositories.Repository(),
+                ],
+                next_page_token='abc',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[],
+                next_page_token='def',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                ],
+                next_page_token='ghi',
+            ),
+            repositories.FetchLinkableRepositoriesResponse(
+                repositories=[
+                    repositories.Repository(),
+                    repositories.Repository(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.fetch_linkable_repositories(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, repositories.Repository)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_fetch_linkable_repositories_async_pages():
+    # NOTE(review): same missing-"()" generator artifact fixed here.
+    client = RepositoryManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.fetch_linkable_repositories),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_linkable_repositories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + repositories.FetchGitRefsRequest, + dict, +]) +def test_fetch_git_refs(request_type, transport: str = 'grpc'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchGitRefsResponse( + ref_names=['ref_names_value'], + ) + response = client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchGitRefsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.FetchGitRefsResponse) + assert response.ref_names == ['ref_names_value'] + + +def test_fetch_git_refs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + client.fetch_git_refs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchGitRefsRequest() + +@pytest.mark.asyncio +async def test_fetch_git_refs_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchGitRefsRequest): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse( + ref_names=['ref_names_value'], + )) + response = await client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repositories.FetchGitRefsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.FetchGitRefsResponse) + assert response.ref_names == ['ref_names_value'] + + +@pytest.mark.asyncio +async def test_fetch_git_refs_async_from_dict(): + await test_fetch_git_refs_async(request_type=dict) + + +def test_fetch_git_refs_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchGitRefsRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + call.return_value = repositories.FetchGitRefsResponse() + client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_fetch_git_refs_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repositories.FetchGitRefsRequest() + + request.repository = 'repository_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse()) + await client.fetch_git_refs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'repository=repository_value', + ) in kw['metadata'] + + +def test_fetch_git_refs_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = repositories.FetchGitRefsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_git_refs( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + + +def test_fetch_git_refs_flattened_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_git_refs( + repositories.FetchGitRefsRequest(), + repository='repository_value', + ) + +@pytest.mark.asyncio +async def test_fetch_git_refs_flattened_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_git_refs), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = repositories.FetchGitRefsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_git_refs( + repository='repository_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].repository + mock_val = 'repository_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_fetch_git_refs_flattened_error_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_git_refs( + repositories.FetchGitRefsRequest(), + repository='repository_value', + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.CreateConnectionRequest, + dict, +]) +def test_create_connection_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 
'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_connection_rest_required_fields(request_type=repositories.CreateConnectionRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["connection_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "connectionId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "connectionId" in jsonified_request + assert jsonified_request["connectionId"] == request_init["connection_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["connectionId"] = 'connection_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("connection_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "connectionId" in jsonified_request + assert jsonified_request["connectionId"] == 'connection_id_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_connection(request) + + expected_params = [ + ( + "connectionId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_connection_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("connectionId", )) & set(("parent", "connection", "connectionId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_connection_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_create_connection") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_create_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.CreateConnectionRequest.pb(repositories.CreateConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.CreateConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.CreateConnectionRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 
'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_connection(request) + + +def test_create_connection_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) + + +def test_create_connection_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_connection( + repositories.CreateConnectionRequest(), + parent='parent_value', + connection=repositories.Connection(name='name_value'), + connection_id='connection_id_value', + ) + + +def test_create_connection_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.GetConnectionRequest, + dict, +]) +def test_get_connection_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.Connection( + name='name_value', + disabled=True, + reconciling=True, + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_connection(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.Connection) + assert response.name == 'name_value' + assert response.disabled is True + assert response.reconciling is True + assert response.etag == 'etag_value' + + +def test_get_connection_rest_required_fields(request_type=repositories.GetConnectionRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.Connection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_connection_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_connection_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_get_connection") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_get_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
repositories.GetConnectionRequest.pb(repositories.GetConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.Connection.to_json(repositories.Connection()) + + request = repositories.GetConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.Connection() + + client.get_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.GetConnectionRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_connection(request) + + +def test_get_connection_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = repositories.Connection() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_get_connection_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_connection( + repositories.GetConnectionRequest(), + name='name_value', + ) + + +def test_get_connection_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.ListConnectionsRequest, + dict, +]) +def test_list_connections_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.ListConnectionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_connections(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_connections_rest_required_fields(request_type=repositories.ListConnectionsRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.ListConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_connections(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_connections_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_connections._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_connections_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_list_connections") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_list_connections") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = repositories.ListConnectionsRequest.pb(repositories.ListConnectionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.ListConnectionsResponse.to_json(repositories.ListConnectionsResponse()) + + request = repositories.ListConnectionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.ListConnectionsResponse() + + client.list_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_connections_rest_bad_request(transport: str = 'rest', request_type=repositories.ListConnectionsRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_connections(request) + + +def test_list_connections_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = repositories.ListConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.ListConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) + + +def test_list_connections_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + repositories.ListConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_connections_rest_pager(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + repositories.Connection(), + ], + next_page_token='abc', + ), + repositories.ListConnectionsResponse( + connections=[], + next_page_token='def', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + ], + next_page_token='ghi', + ), + repositories.ListConnectionsResponse( + connections=[ + repositories.Connection(), + repositories.Connection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(repositories.ListConnectionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Connection) + for i in results) + + pages = list(client.list_connections(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + repositories.UpdateConnectionRequest, + dict, +]) +def test_update_connection_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} + request_init["connection"] = {'name': 
'projects/sample1/locations/sample2/connections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_connection_rest_required_fields(request_type=repositories.UpdateConnectionRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_connection_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "updateMask", )) & set(("connection", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_connection_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_update_connection") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_update_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.UpdateConnectionRequest.pb(repositories.UpdateConnectionRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.UpdateConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.UpdateConnectionRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} + request_init["connection"] = {'name': 'projects/sample1/locations/sample2/connections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 
'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_connection(request) + + +def test_update_connection_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{connection.name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_update_connection_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_connection( + repositories.UpdateConnectionRequest(), + connection=repositories.Connection(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_connection_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.DeleteConnectionRequest, + dict, +]) +def test_delete_connection_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_connection_rest_required_fields(request_type=repositories.DeleteConnectionRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_connection(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_connection_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_connection_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_delete_connection") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_delete_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = repositories.DeleteConnectionRequest.pb(repositories.DeleteConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.DeleteConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.DeleteConnectionRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_connection(request) + + +def test_delete_connection_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) + + +def test_delete_connection_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_connection( + repositories.DeleteConnectionRequest(), + name='name_value', + ) + + +def test_delete_connection_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.CreateRepositoryRequest, + dict, +]) +def test_create_repository_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request_init["repository"] = {'name': 'name_value', 'remote_uri': 'remote_uri_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'annotations': {}, 'etag': 'etag_value', 'webhook_id': 'webhook_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_repository(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_repository_rest_required_fields(request_type=repositories.CreateRepositoryRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["repository_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "repositoryId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "repositoryId" in jsonified_request + assert jsonified_request["repositoryId"] == request_init["repository_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["repositoryId"] = 'repository_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_repository._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("repository_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "repositoryId" in jsonified_request + assert jsonified_request["repositoryId"] == 'repository_id_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_repository(request) + + expected_params = [ + ( + "repositoryId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_repository_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_repository._get_unset_required_fields({}) + assert set(unset_fields) == (set(("repositoryId", )) & set(("parent", "repository", "repositoryId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_repository_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_create_repository") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_create_repository") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.CreateRepositoryRequest.pb(repositories.CreateRepositoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.CreateRepositoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.CreateRepositoryRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request_init["repository"] = {'name': 'name_value', 'remote_uri': 'remote_uri_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'annotations': {}, 'etag': 'etag_value', 'webhook_id': 'webhook_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_repository(request) + + +def test_create_repository_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_repository(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories" % client.transport._host, args[1]) + + +def test_create_repository_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_repository( + repositories.CreateRepositoryRequest(), + parent='parent_value', + repository=repositories.Repository(name='name_value'), + repository_id='repository_id_value', + ) + + +def test_create_repository_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.BatchCreateRepositoriesRequest, + dict, +]) +def test_batch_create_repositories_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_create_repositories(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_batch_create_repositories_rest_required_fields(request_type=repositories.BatchCreateRepositoriesRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.batch_create_repositories(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_batch_create_repositories_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.batch_create_repositories._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "requests", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_repositories_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_batch_create_repositories") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_batch_create_repositories") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.BatchCreateRepositoriesRequest.pb(repositories.BatchCreateRepositoriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.BatchCreateRepositoriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_create_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.BatchCreateRepositoriesRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_repositories(request) + + +def test_batch_create_repositories_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.batch_create_repositories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories:batchCreate" % client.transport._host, args[1]) + + +def test_batch_create_repositories_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_create_repositories( + repositories.BatchCreateRepositoriesRequest(), + parent='parent_value', + requests=[repositories.CreateRepositoryRequest(parent='parent_value')], + ) + + +def test_batch_create_repositories_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.GetRepositoryRequest, + dict, +]) +def test_get_repository_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = repositories.Repository( + name='name_value', + remote_uri='remote_uri_value', + etag='etag_value', + webhook_id='webhook_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.Repository.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_repository(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, repositories.Repository) + assert response.name == 'name_value' + assert response.remote_uri == 'remote_uri_value' + assert response.etag == 'etag_value' + assert response.webhook_id == 'webhook_id_value' + + +def test_get_repository_rest_required_fields(request_type=repositories.GetRepositoryRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = 
RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.Repository() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.Repository.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_repository(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_repository_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_repository._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_repository_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_get_repository") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_get_repository") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.GetRepositoryRequest.pb(repositories.GetRepositoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.Repository.to_json(repositories.Repository()) + + request = repositories.GetRepositoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.Repository() + + client.get_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.GetRepositoryRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_repository(request) + + +def test_get_repository_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.Repository() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.Repository.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_repository(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*/repositories/*}" % client.transport._host, args[1]) + + +def test_get_repository_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_repository( + repositories.GetRepositoryRequest(), + name='name_value', + ) + + +def test_get_repository_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.ListRepositoriesRequest, + dict, +]) +def test_list_repositories_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.ListRepositoriesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_repositories(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRepositoriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_repositories_rest_required_fields(request_type=repositories.ListRepositoriesRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_repositories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.ListRepositoriesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_repositories(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_repositories_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_repositories._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_repositories_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_list_repositories") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_list_repositories") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.ListRepositoriesRequest.pb(repositories.ListRepositoriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.ListRepositoriesResponse.to_json(repositories.ListRepositoriesResponse()) + + request = repositories.ListRepositoriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.ListRepositoriesResponse() + + client.list_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.ListRepositoriesRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_repositories(request) + + +def test_list_repositories_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.ListRepositoriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_repositories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories" % client.transport._host, args[1]) + + +def test_list_repositories_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_repositories( + repositories.ListRepositoriesRequest(), + parent='parent_value', + ) + + +def test_list_repositories_rest_pager(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.ListRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.ListRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(repositories.ListRepositoriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} + + pager = client.list_repositories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Repository) + for i in results) + + pages = list(client.list_repositories(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + repositories.DeleteRepositoryRequest, + dict, +]) +def test_delete_repository_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + 
+ # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_repository(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_repository_rest_required_fields(request_type=repositories.DeleteRepositoryRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_repository._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_repository._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_repository(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_repository_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_repository._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_repository_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_delete_repository") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_delete_repository") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.DeleteRepositoryRequest.pb(repositories.DeleteRepositoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = repositories.DeleteRepositoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.DeleteRepositoryRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_repository(request) + + +def test_delete_repository_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_repository(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*/repositories/*}" % client.transport._host, args[1]) + + +def test_delete_repository_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_repository( + repositories.DeleteRepositoryRequest(), + name='name_value', + ) + + +def test_delete_repository_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchReadWriteTokenRequest, + dict, +]) +def test_fetch_read_write_token_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadWriteTokenResponse( + token='token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.fetch_read_write_token(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.FetchReadWriteTokenResponse) + assert response.token == 'token_value' + + +def test_fetch_read_write_token_rest_required_fields(request_type=repositories.FetchReadWriteTokenRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["repository"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_write_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["repository"] = 'repository_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_write_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "repository" in jsonified_request + assert jsonified_request["repository"] == 'repository_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadWriteTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.fetch_read_write_token(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_fetch_read_write_token_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.fetch_read_write_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("repository", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_read_write_token_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_read_write_token") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_read_write_token") as pre: 
+ pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.FetchReadWriteTokenRequest.pb(repositories.FetchReadWriteTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.FetchReadWriteTokenResponse.to_json(repositories.FetchReadWriteTokenResponse()) + + request = repositories.FetchReadWriteTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.FetchReadWriteTokenResponse() + + client.fetch_read_write_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_read_write_token_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchReadWriteTokenRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_read_write_token(request) + + +def test_fetch_read_write_token_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadWriteTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + repository='repository_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.fetch_read_write_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadWriteToken" % client.transport._host, args[1]) + + +def test_fetch_read_write_token_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_read_write_token( + repositories.FetchReadWriteTokenRequest(), + repository='repository_value', + ) + + +def test_fetch_read_write_token_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchReadTokenRequest, + dict, +]) +def test_fetch_read_token_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadTokenResponse( + token='token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.fetch_read_token(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.FetchReadTokenResponse) + assert response.token == 'token_value' + + +def test_fetch_read_token_rest_required_fields(request_type=repositories.FetchReadTokenRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["repository"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["repository"] = 'repository_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "repository" in jsonified_request + assert jsonified_request["repository"] == 'repository_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.fetch_read_token(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_fetch_read_token_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.fetch_read_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("repository", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_read_token_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_read_token") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_read_token") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = repositories.FetchReadTokenRequest.pb(repositories.FetchReadTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.FetchReadTokenResponse.to_json(repositories.FetchReadTokenResponse()) + + request = repositories.FetchReadTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.FetchReadTokenResponse() + + client.fetch_read_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_read_token_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchReadTokenRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_read_token(request) + + +def test_fetch_read_token_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchReadTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + repository='repository_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.fetch_read_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadToken" % client.transport._host, args[1]) + + +def test_fetch_read_token_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_read_token( + repositories.FetchReadTokenRequest(), + repository='repository_value', + ) + + +def test_fetch_read_token_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchLinkableRepositoriesRequest, + dict, +]) +def test_fetch_linkable_repositories_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchLinkableRepositoriesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchLinkableRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.fetch_linkable_repositories(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchLinkableRepositoriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_fetch_linkable_repositories_rest_required_fields(request_type=repositories.FetchLinkableRepositoriesRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["connection"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_linkable_repositories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["connection"] = 'connection_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_linkable_repositories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "connection" in jsonified_request + assert jsonified_request["connection"] == 'connection_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.FetchLinkableRepositoriesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.FetchLinkableRepositoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.fetch_linkable_repositories(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_fetch_linkable_repositories_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.fetch_linkable_repositories._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("connection", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_linkable_repositories_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_linkable_repositories") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_linkable_repositories") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = repositories.FetchLinkableRepositoriesRequest.pb(repositories.FetchLinkableRepositoriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.FetchLinkableRepositoriesResponse.to_json(repositories.FetchLinkableRepositoriesResponse()) + + request = repositories.FetchLinkableRepositoriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.FetchLinkableRepositoriesResponse() + + client.fetch_linkable_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_linkable_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchLinkableRepositoriesRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_linkable_repositories(request) + + +def test_fetch_linkable_repositories_rest_pager(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + repositories.Repository(), + ], + next_page_token='abc', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[], + next_page_token='def', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + ], + next_page_token='ghi', + ), + repositories.FetchLinkableRepositoriesResponse( + repositories=[ + repositories.Repository(), + repositories.Repository(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(repositories.FetchLinkableRepositoriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} + + pager = 
client.fetch_linkable_repositories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, repositories.Repository) + for i in results) + + pages = list(client.fetch_linkable_repositories(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + repositories.FetchGitRefsRequest, + dict, +]) +def test_fetch_git_refs_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = repositories.FetchGitRefsResponse( + ref_names=['ref_names_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.fetch_git_refs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repositories.FetchGitRefsResponse) + assert response.ref_names == ['ref_names_value'] + + +def test_fetch_git_refs_rest_required_fields(request_type=repositories.FetchGitRefsRequest): + transport_class = transports.RepositoryManagerRestTransport + + request_init = {} + request_init["repository"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_git_refs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["repository"] = 'repository_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_git_refs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("ref_type", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "repository" in jsonified_request + assert jsonified_request["repository"] == 'repository_value' + + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = repositories.FetchGitRefsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.fetch_git_refs(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_fetch_git_refs_rest_unset_required_fields(): + transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.fetch_git_refs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("refType", )) & set(("repository", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_git_refs_rest_interceptors(null_interceptor): + transport = transports.RepositoryManagerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), + ) + client = RepositoryManagerClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_git_refs") as post, \ + mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_git_refs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
repositories.FetchGitRefsRequest.pb(repositories.FetchGitRefsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = repositories.FetchGitRefsResponse.to_json(repositories.FetchGitRefsResponse()) + + request = repositories.FetchGitRefsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = repositories.FetchGitRefsResponse() + + client.fetch_git_refs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_git_refs_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchGitRefsRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_git_refs(request) + + +def test_fetch_git_refs_rest_flattened(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = repositories.FetchGitRefsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + repository='repository_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.fetch_git_refs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:fetchGitRefs" % client.transport._host, args[1]) + + +def test_fetch_git_refs_rest_flattened_error(transport: str = 'rest'): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_git_refs( + repositories.FetchGitRefsRequest(), + repository='repository_value', + ) + + +def test_fetch_git_refs_rest_error(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RepositoryManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RepositoryManagerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RepositoryManagerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RepositoryManagerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RepositoryManagerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RepositoryManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.RepositoryManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.RepositoryManagerGrpcTransport, + transports.RepositoryManagerGrpcAsyncIOTransport, + transports.RepositoryManagerRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = RepositoryManagerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.RepositoryManagerGrpcTransport, + ) + +def test_repository_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RepositoryManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_repository_manager_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RepositoryManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_connection', + 'get_connection', + 'list_connections', + 'update_connection', + 'delete_connection', + 'create_repository', + 'batch_create_repositories', + 'get_repository', + 'list_repositories', + 'delete_repository', + 'fetch_read_write_token', + 'fetch_read_token', + 'fetch_linkable_repositories', + 'fetch_git_refs', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_operation', + 'cancel_operation', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_repository_manager_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RepositoryManagerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + 
load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_repository_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RepositoryManagerTransport() + adc.assert_called_once() + + +def test_repository_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RepositoryManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RepositoryManagerGrpcTransport, + transports.RepositoryManagerGrpcAsyncIOTransport, + ], +) +def test_repository_manager_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RepositoryManagerGrpcTransport, + transports.RepositoryManagerGrpcAsyncIOTransport, + transports.RepositoryManagerRestTransport, + ], +) +def test_repository_manager_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.RepositoryManagerGrpcTransport, grpc_helpers), + (transports.RepositoryManagerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_repository_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudbuild.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudbuild.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) +def test_repository_manager_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_repository_manager_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RepositoryManagerRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_repository_manager_rest_lro_client(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_repository_manager_host_no_port(transport_name): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudbuild.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudbuild.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_repository_manager_host_with_port(transport_name): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudbuild.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudbuild.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_repository_manager_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RepositoryManagerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RepositoryManagerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_connection._session + session2 = client2.transport.create_connection._session + assert session1 != session2 + session1 = client1.transport.get_connection._session + session2 = client2.transport.get_connection._session + assert session1 != session2 + session1 = client1.transport.list_connections._session + session2 = client2.transport.list_connections._session + assert session1 != session2 + session1 = 
client1.transport.update_connection._session + session2 = client2.transport.update_connection._session + assert session1 != session2 + session1 = client1.transport.delete_connection._session + session2 = client2.transport.delete_connection._session + assert session1 != session2 + session1 = client1.transport.create_repository._session + session2 = client2.transport.create_repository._session + assert session1 != session2 + session1 = client1.transport.batch_create_repositories._session + session2 = client2.transport.batch_create_repositories._session + assert session1 != session2 + session1 = client1.transport.get_repository._session + session2 = client2.transport.get_repository._session + assert session1 != session2 + session1 = client1.transport.list_repositories._session + session2 = client2.transport.list_repositories._session + assert session1 != session2 + session1 = client1.transport.delete_repository._session + session2 = client2.transport.delete_repository._session + assert session1 != session2 + session1 = client1.transport.fetch_read_write_token._session + session2 = client2.transport.fetch_read_write_token._session + assert session1 != session2 + session1 = client1.transport.fetch_read_token._session + session2 = client2.transport.fetch_read_token._session + assert session1 != session2 + session1 = client1.transport.fetch_linkable_repositories._session + session2 = client2.transport.fetch_linkable_repositories._session + assert session1 != session2 + session1 = client1.transport.fetch_git_refs._session + session2 = client2.transport.fetch_git_refs._session + assert session1 != session2 +def test_repository_manager_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.RepositoryManagerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_repository_manager_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.RepositoryManagerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) +def test_repository_manager_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + 
"mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) +def test_repository_manager_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_repository_manager_grpc_lro_client(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_repository_manager_grpc_lro_async_client(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_connection_path(): + project = "squid" + location = "clam" + connection = "whelk" + expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) + actual = RepositoryManagerClient.connection_path(project, location, connection) + assert expected == actual + + +def test_parse_connection_path(): + expected = { + "project": "octopus", + "location": "oyster", + "connection": "nudibranch", + } + path = RepositoryManagerClient.connection_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RepositoryManagerClient.parse_connection_path(path) + assert expected == actual + +def test_repository_path(): + project = "cuttlefish" + location = "mussel" + connection = "winkle" + repository = "nautilus" + expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) + actual = RepositoryManagerClient.repository_path(project, location, connection, repository) + assert expected == actual + + +def test_parse_repository_path(): + expected = { + "project": "scallop", + "location": "abalone", + "connection": "squid", + "repository": "clam", + } + path = RepositoryManagerClient.repository_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_repository_path(path) + assert expected == actual + +def test_secret_version_path(): + project = "whelk" + secret = "octopus" + version = "oyster" + expected = "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) + actual = RepositoryManagerClient.secret_version_path(project, secret, version) + assert expected == actual + + +def test_parse_secret_version_path(): + expected = { + "project": "nudibranch", + "secret": "cuttlefish", + "version": "mussel", + } + path = RepositoryManagerClient.secret_version_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RepositoryManagerClient.parse_secret_version_path(path) + assert expected == actual + +def test_service_path(): + project = "winkle" + location = "nautilus" + namespace = "scallop" + service = "abalone" + expected = "projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format(project=project, location=location, namespace=namespace, service=service, ) + actual = RepositoryManagerClient.service_path(project, location, namespace, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "squid", + "location": "clam", + "namespace": "whelk", + "service": "octopus", + } + path = RepositoryManagerClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_service_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RepositoryManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = RepositoryManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = RepositoryManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = RepositoryManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RepositoryManagerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RepositoryManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = RepositoryManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = RepositoryManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = RepositoryManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RepositoryManagerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RepositoryManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = RepositoryManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RepositoryManagerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RepositoryManagerTransport, '_prep_wrapped_messages') as prep: + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RepositoryManagerTransport, '_prep_wrapped_messages') as prep: + transport_class = RepositoryManagerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + +def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_cancel_operation(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_set_iam_policy_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + +def test_get_iam_policy(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +def test_test_iam_permissions(transport: str = "grpc"): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = RepositoryManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = RepositoryManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport), + (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 89d14282e760d2ac64f4a5e70b5aeb68deb92200 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 12 Jul 2023 17:52:09 +0000 Subject: [PATCH 4/4] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../services/cloud_build/client.py | 9 +- owl-bot-staging/v1/.coveragerc | 13 - owl-bot-staging/v1/.flake8 | 33 - owl-bot-staging/v1/MANIFEST.in | 2 - owl-bot-staging/v1/README.rst | 49 - .../v1/docs/cloudbuild_v1/cloud_build.rst | 10 - .../v1/docs/cloudbuild_v1/services.rst | 6 - .../v1/docs/cloudbuild_v1/types.rst | 6 - owl-bot-staging/v1/docs/conf.py | 376 - owl-bot-staging/v1/docs/index.rst | 7 - .../cloud/devtools/cloudbuild/__init__.py | 
151 - .../devtools/cloudbuild/gapic_version.py | 16 - .../google/cloud/devtools/cloudbuild/py.typed | 2 - .../cloud/devtools/cloudbuild_v1/__init__.py | 152 - .../cloudbuild_v1/gapic_metadata.json | 298 - .../devtools/cloudbuild_v1/gapic_version.py | 16 - .../cloud/devtools/cloudbuild_v1/py.typed | 2 - .../cloudbuild_v1/services/__init__.py | 15 - .../services/cloud_build/__init__.py | 22 - .../services/cloud_build/async_client.py | 2601 ---- .../services/cloud_build/client.py | 2899 ----- .../services/cloud_build/pagers.py | 381 - .../cloud_build/transports/__init__.py | 38 - .../services/cloud_build/transports/base.py | 443 - .../services/cloud_build/transports/grpc.py | 793 -- .../cloud_build/transports/grpc_asyncio.py | 792 -- .../services/cloud_build/transports/rest.py | 2419 ---- .../devtools/cloudbuild_v1/types/__init__.py | 144 - .../cloudbuild_v1/types/cloudbuild.py | 3680 ------ owl-bot-staging/v1/mypy.ini | 3 - owl-bot-staging/v1/noxfile.py | 184 - ...nerated_cloud_build_approve_build_async.py | 56 - ...enerated_cloud_build_approve_build_sync.py | 56 - ...enerated_cloud_build_cancel_build_async.py | 53 - ...generated_cloud_build_cancel_build_sync.py | 53 - ...enerated_cloud_build_create_build_async.py | 56 - ...generated_cloud_build_create_build_sync.py | 56 - ..._cloud_build_create_build_trigger_async.py | 56 - ...d_cloud_build_create_build_trigger_sync.py | 56 - ...ed_cloud_build_create_worker_pool_async.py | 57 - ...ted_cloud_build_create_worker_pool_sync.py | 57 - ..._cloud_build_delete_build_trigger_async.py | 51 - ...d_cloud_build_delete_build_trigger_sync.py | 51 - ...ed_cloud_build_delete_worker_pool_async.py | 56 - ...ted_cloud_build_delete_worker_pool_sync.py | 56 - ...1_generated_cloud_build_get_build_async.py | 53 - ...v1_generated_cloud_build_get_build_sync.py | 53 - ...ted_cloud_build_get_build_trigger_async.py | 53 - ...ated_cloud_build_get_build_trigger_sync.py | 53 - ...rated_cloud_build_get_worker_pool_async.py | 52 - 
...erated_cloud_build_get_worker_pool_sync.py | 52 - ...d_cloud_build_list_build_triggers_async.py | 53 - ...ed_cloud_build_list_build_triggers_sync.py | 53 - ...generated_cloud_build_list_builds_async.py | 53 - ..._generated_cloud_build_list_builds_sync.py | 53 - ...ted_cloud_build_list_worker_pools_async.py | 53 - ...ated_cloud_build_list_worker_pools_sync.py | 53 - ...oud_build_receive_trigger_webhook_async.py | 51 - ...loud_build_receive_trigger_webhook_sync.py | 51 - ...generated_cloud_build_retry_build_async.py | 57 - ..._generated_cloud_build_retry_build_sync.py | 57 - ...ted_cloud_build_run_build_trigger_async.py | 57 - ...ated_cloud_build_run_build_trigger_sync.py | 57 - ..._cloud_build_update_build_trigger_async.py | 57 - ...d_cloud_build_update_build_trigger_sync.py | 57 - ...ed_cloud_build_update_worker_pool_async.py | 55 - ...ted_cloud_build_update_worker_pool_sync.py | 55 - ...etadata_google.devtools.cloudbuild.v1.json | 3027 ----- .../scripts/fixup_cloudbuild_v1_keywords.py | 193 - owl-bot-staging/v1/setup.py | 90 - .../v1/testing/constraints-3.10.txt | 6 - .../v1/testing/constraints-3.11.txt | 6 - .../v1/testing/constraints-3.12.txt | 6 - .../v1/testing/constraints-3.7.txt | 9 - .../v1/testing/constraints-3.8.txt | 6 - .../v1/testing/constraints-3.9.txt | 6 - owl-bot-staging/v1/tests/__init__.py | 16 - owl-bot-staging/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/cloudbuild_v1/__init__.py | 16 - .../gapic/cloudbuild_v1/test_cloud_build.py | 10280 ---------------- owl-bot-staging/v2/.coveragerc | 13 - owl-bot-staging/v2/.flake8 | 33 - owl-bot-staging/v2/MANIFEST.in | 2 - owl-bot-staging/v2/README.rst | 49 - .../docs/cloudbuild_v2/repository_manager.rst | 10 - .../v2/docs/cloudbuild_v2/services.rst | 6 - .../v2/docs/cloudbuild_v2/types.rst | 6 - owl-bot-staging/v2/docs/conf.py | 376 - owl-bot-staging/v2/docs/index.rst | 7 - .../cloud/devtools/cloudbuild/__init__.py | 93 - 
.../devtools/cloudbuild/gapic_version.py | 16 - .../google/cloud/devtools/cloudbuild/py.typed | 2 - .../cloud/devtools/cloudbuild_v2/__init__.py | 94 - .../cloudbuild_v2/gapic_metadata.json | 238 - .../devtools/cloudbuild_v2/gapic_version.py | 16 - .../cloud/devtools/cloudbuild_v2/py.typed | 2 - .../cloudbuild_v2/services/__init__.py | 15 - .../services/repository_manager/__init__.py | 22 - .../repository_manager/async_client.py | 2257 ---- .../services/repository_manager/client.py | 2445 ---- .../services/repository_manager/pagers.py | 381 - .../repository_manager/transports/__init__.py | 38 - .../repository_manager/transports/base.py | 431 - .../repository_manager/transports/grpc.py | 743 -- .../transports/grpc_asyncio.py | 742 -- .../repository_manager/transports/rest.py | 2275 ---- .../devtools/cloudbuild_v2/types/__init__.py | 88 - .../cloudbuild_v2/types/cloudbuild.py | 159 - .../cloudbuild_v2/types/repositories.py | 1104 -- owl-bot-staging/v2/mypy.ini | 3 - owl-bot-staging/v2/noxfile.py | 184 - ...manager_batch_create_repositories_async.py | 62 - ..._manager_batch_create_repositories_sync.py | 62 - ...ository_manager_create_connection_async.py | 57 - ...pository_manager_create_connection_sync.py | 57 - ...ository_manager_create_repository_async.py | 61 - ...pository_manager_create_repository_sync.py | 61 - ...ository_manager_delete_connection_async.py | 56 - ...pository_manager_delete_connection_sync.py | 56 - ...ository_manager_delete_repository_async.py | 56 - ...pository_manager_delete_repository_sync.py | 56 - ...repository_manager_fetch_git_refs_async.py | 52 - ..._repository_manager_fetch_git_refs_sync.py | 52 - ...nager_fetch_linkable_repositories_async.py | 53 - ...anager_fetch_linkable_repositories_sync.py | 53 - ...pository_manager_fetch_read_token_async.py | 52 - ...epository_manager_fetch_read_token_sync.py | 52 - ...ry_manager_fetch_read_write_token_async.py | 52 - ...ory_manager_fetch_read_write_token_sync.py | 52 - 
...repository_manager_get_connection_async.py | 52 - ..._repository_manager_get_connection_sync.py | 52 - ...repository_manager_get_repository_async.py | 52 - ..._repository_manager_get_repository_sync.py | 52 - ...pository_manager_list_connections_async.py | 53 - ...epository_manager_list_connections_sync.py | 53 - ...ository_manager_list_repositories_async.py | 53 - ...pository_manager_list_repositories_sync.py | 53 - ...ository_manager_update_connection_async.py | 55 - ...pository_manager_update_connection_sync.py | 55 - ...etadata_google.devtools.cloudbuild.v2.json | 2309 ---- .../scripts/fixup_cloudbuild_v2_keywords.py | 189 - owl-bot-staging/v2/setup.py | 91 - .../v2/testing/constraints-3.10.txt | 7 - .../v2/testing/constraints-3.11.txt | 7 - .../v2/testing/constraints-3.12.txt | 7 - .../v2/testing/constraints-3.7.txt | 10 - .../v2/testing/constraints-3.8.txt | 7 - .../v2/testing/constraints-3.9.txt | 7 - owl-bot-staging/v2/tests/__init__.py | 16 - owl-bot-staging/v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/cloudbuild_v2/__init__.py | 16 - .../cloudbuild_v2/test_repository_manager.py | 9596 --------------- .../gapic/cloudbuild_v1/test_cloud_build.py | 125 +- 155 files changed, 63 insertions(+), 56951 deletions(-) delete mode 100644 owl-bot-staging/v1/.coveragerc delete mode 100644 owl-bot-staging/v1/.flake8 delete mode 100644 owl-bot-staging/v1/MANIFEST.in delete mode 100644 owl-bot-staging/v1/README.rst delete mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst delete mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/services.rst delete mode 100644 owl-bot-staging/v1/docs/cloudbuild_v1/types.rst delete mode 100644 owl-bot-staging/v1/docs/conf.py delete mode 100644 owl-bot-staging/v1/docs/index.rst delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py delete mode 100644 
owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py delete mode 100644 owl-bot-staging/v1/mypy.ini delete mode 100644 owl-bot-staging/v1/noxfile.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py delete mode 100644 
owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py delete mode 100644 
owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py 
delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json delete mode 100644 owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py delete mode 100644 owl-bot-staging/v1/setup.py delete mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1/tests/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py delete mode 100644 owl-bot-staging/v2/.coveragerc delete mode 100644 owl-bot-staging/v2/.flake8 delete mode 100644 owl-bot-staging/v2/MANIFEST.in delete mode 100644 owl-bot-staging/v2/README.rst delete mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst delete mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/services.rst delete mode 100644 owl-bot-staging/v2/docs/cloudbuild_v2/types.rst delete mode 100644 owl-bot-staging/v2/docs/conf.py delete mode 100644 owl-bot-staging/v2/docs/index.rst delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py delete mode 100644 
owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py delete mode 100644 owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py delete mode 100644 owl-bot-staging/v2/mypy.ini 
delete mode 100644 owl-bot-staging/v2/noxfile.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py delete mode 100644 
owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py delete mode 100644 owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json delete mode 100644 
owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py delete mode 100644 owl-bot-staging/v2/setup.py delete mode 100644 owl-bot-staging/v2/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v2/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v2/tests/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 8870b9b6..ad35d7b7 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -208,23 +208,18 @@ def parse_build_path(path: str) -> Dict[str, str]: @staticmethod def build_trigger_path( project: str, - location: str, trigger: str, ) -> str: """Returns a fully-qualified build_trigger string.""" - return "projects/{project}/locations/{location}/triggers/{trigger}".format( + return "projects/{project}/triggers/{trigger}".format( project=project, - location=location, trigger=trigger, ) @staticmethod def parse_build_trigger_path(path: str) -> Dict[str, str]: """Parses a build_trigger path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/triggers/(?P.+?)$", - path, - ) + m = re.match(r"^projects/(?P.+?)/triggers/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod diff --git a/owl-bot-staging/v1/.coveragerc 
b/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index a0cf72db..00000000 --- a/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/devtools/cloudbuild/__init__.py - google/cloud/devtools/cloudbuild/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index af14cd40..00000000 --- a/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/devtools/cloudbuild *.py -recursive-include google/cloud/devtools/cloudbuild_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst deleted file mode 100644 index c788a1b3..00000000 --- a/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Devtools Cloudbuild API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Devtools Cloudbuild API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst deleted file mode 100644 index be81dc5c..00000000 --- a/owl-bot-staging/v1/docs/cloudbuild_v1/cloud_build.rst +++ /dev/null @@ -1,10 +0,0 @@ -CloudBuild ----------------------------- - -.. automodule:: google.cloud.devtools.cloudbuild_v1.services.cloud_build - :members: - :inherited-members: - -.. automodule:: google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst deleted file mode 100644 index c0bdc88d..00000000 --- a/owl-bot-staging/v1/docs/cloudbuild_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Devtools Cloudbuild v1 API -==================================================== -.. toctree:: - :maxdepth: 2 - - cloud_build diff --git a/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst b/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst deleted file mode 100644 index 0e955742..00000000 --- a/owl-bot-staging/v1/docs/cloudbuild_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Devtools Cloudbuild v1 API -================================================= - -.. automodule:: google.cloud.devtools.cloudbuild_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py deleted file mode 100644 index 4bd8e2dd..00000000 --- a/owl-bot-staging/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-build documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-build" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). 
-# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Devtools Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. 
-# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. 
-# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-build-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-build.tex", - u"google-cloud-build Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. 
-# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-build", - u"Google Cloud Devtools Cloudbuild Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-build", - u"google-cloud-build Documentation", - author, - "google-cloud-build", - "GAPIC library for Google Cloud Devtools Cloudbuild API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 37eed237..00000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - cloudbuild_v1/services - cloudbuild_v1/types diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py deleted file mode 100644 index 06ff95e8..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/__init__.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.devtools.cloudbuild import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.devtools.cloudbuild_v1.services.cloud_build.client import CloudBuildClient -from google.cloud.devtools.cloudbuild_v1.services.cloud_build.async_client import CloudBuildAsyncClient - -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApprovalConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApprovalResult -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ApproveBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ArtifactResult -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Artifacts -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Build -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildApproval -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildOperationMetadata -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildOptions -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildStep -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuildTrigger -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuiltImage -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CancelBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildTriggerRequest -from 
google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolOperationMetadata -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteBuildTriggerRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolOperationMetadata -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import FileHashes -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildTriggerRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitHubEventsConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitSource -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Hash -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import InlineSecret -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsResponse -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersResponse -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsResponse -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PrivatePoolV1Config -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PubsubConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PullRequestFilter -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import PushFilter -from 
google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ReceiveTriggerWebhookRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ReceiveTriggerWebhookResponse -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RepositoryEventConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RepoSource -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Results -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RetryBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import RunBuildTriggerRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Secret -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SecretManagerSecret -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Secrets -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Source -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SourceProvenance -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSource -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSourceManifest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import TimeSpan -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateBuildTriggerRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolOperationMetadata -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedMavenArtifact -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedNpmPackage -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UploadedPythonPackage -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Volume -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import WebhookConfig -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import 
WorkerPool - -__all__ = ('CloudBuildClient', - 'CloudBuildAsyncClient', - 'ApprovalConfig', - 'ApprovalResult', - 'ApproveBuildRequest', - 'ArtifactResult', - 'Artifacts', - 'Build', - 'BuildApproval', - 'BuildOperationMetadata', - 'BuildOptions', - 'BuildStep', - 'BuildTrigger', - 'BuiltImage', - 'CancelBuildRequest', - 'CreateBuildRequest', - 'CreateBuildTriggerRequest', - 'CreateWorkerPoolOperationMetadata', - 'CreateWorkerPoolRequest', - 'DeleteBuildTriggerRequest', - 'DeleteWorkerPoolOperationMetadata', - 'DeleteWorkerPoolRequest', - 'FileHashes', - 'GetBuildRequest', - 'GetBuildTriggerRequest', - 'GetWorkerPoolRequest', - 'GitHubEventsConfig', - 'GitSource', - 'Hash', - 'InlineSecret', - 'ListBuildsRequest', - 'ListBuildsResponse', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', - 'PrivatePoolV1Config', - 'PubsubConfig', - 'PullRequestFilter', - 'PushFilter', - 'ReceiveTriggerWebhookRequest', - 'ReceiveTriggerWebhookResponse', - 'RepositoryEventConfig', - 'RepoSource', - 'Results', - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'Secret', - 'SecretManagerSecret', - 'Secrets', - 'Source', - 'SourceProvenance', - 'StorageSource', - 'StorageSourceManifest', - 'TimeSpan', - 'UpdateBuildTriggerRequest', - 'UpdateWorkerPoolOperationMetadata', - 'UpdateWorkerPoolRequest', - 'UploadedMavenArtifact', - 'UploadedNpmPackage', - 'UploadedPythonPackage', - 'Volume', - 'WebhookConfig', - 'WorkerPool', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed deleted file mode 100644 index 6070c14c..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py deleted file mode 100644 index 9fcffdb4..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/__init__.py +++ /dev/null @@ -1,152 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.cloud_build import CloudBuildClient -from .services.cloud_build import CloudBuildAsyncClient - -from .types.cloudbuild import ApprovalConfig -from .types.cloudbuild import ApprovalResult -from .types.cloudbuild import ApproveBuildRequest -from .types.cloudbuild import ArtifactResult -from .types.cloudbuild import Artifacts -from .types.cloudbuild import Build -from .types.cloudbuild import BuildApproval -from .types.cloudbuild import BuildOperationMetadata -from .types.cloudbuild import BuildOptions -from .types.cloudbuild import BuildStep -from .types.cloudbuild import BuildTrigger -from .types.cloudbuild import BuiltImage -from .types.cloudbuild import CancelBuildRequest -from .types.cloudbuild import CreateBuildRequest -from .types.cloudbuild import CreateBuildTriggerRequest -from .types.cloudbuild import CreateWorkerPoolOperationMetadata -from .types.cloudbuild import CreateWorkerPoolRequest -from .types.cloudbuild import DeleteBuildTriggerRequest -from .types.cloudbuild import DeleteWorkerPoolOperationMetadata -from .types.cloudbuild import DeleteWorkerPoolRequest -from .types.cloudbuild import FileHashes -from .types.cloudbuild import GetBuildRequest -from .types.cloudbuild import GetBuildTriggerRequest -from .types.cloudbuild import GetWorkerPoolRequest -from .types.cloudbuild import GitHubEventsConfig -from .types.cloudbuild import GitSource -from .types.cloudbuild import Hash -from .types.cloudbuild import InlineSecret -from .types.cloudbuild import ListBuildsRequest -from .types.cloudbuild import ListBuildsResponse -from .types.cloudbuild import ListBuildTriggersRequest -from .types.cloudbuild import ListBuildTriggersResponse -from .types.cloudbuild import ListWorkerPoolsRequest -from .types.cloudbuild import ListWorkerPoolsResponse -from .types.cloudbuild import PrivatePoolV1Config -from .types.cloudbuild 
import PubsubConfig -from .types.cloudbuild import PullRequestFilter -from .types.cloudbuild import PushFilter -from .types.cloudbuild import ReceiveTriggerWebhookRequest -from .types.cloudbuild import ReceiveTriggerWebhookResponse -from .types.cloudbuild import RepositoryEventConfig -from .types.cloudbuild import RepoSource -from .types.cloudbuild import Results -from .types.cloudbuild import RetryBuildRequest -from .types.cloudbuild import RunBuildTriggerRequest -from .types.cloudbuild import Secret -from .types.cloudbuild import SecretManagerSecret -from .types.cloudbuild import Secrets -from .types.cloudbuild import Source -from .types.cloudbuild import SourceProvenance -from .types.cloudbuild import StorageSource -from .types.cloudbuild import StorageSourceManifest -from .types.cloudbuild import TimeSpan -from .types.cloudbuild import UpdateBuildTriggerRequest -from .types.cloudbuild import UpdateWorkerPoolOperationMetadata -from .types.cloudbuild import UpdateWorkerPoolRequest -from .types.cloudbuild import UploadedMavenArtifact -from .types.cloudbuild import UploadedNpmPackage -from .types.cloudbuild import UploadedPythonPackage -from .types.cloudbuild import Volume -from .types.cloudbuild import WebhookConfig -from .types.cloudbuild import WorkerPool - -__all__ = ( - 'CloudBuildAsyncClient', -'ApprovalConfig', -'ApprovalResult', -'ApproveBuildRequest', -'ArtifactResult', -'Artifacts', -'Build', -'BuildApproval', -'BuildOperationMetadata', -'BuildOptions', -'BuildStep', -'BuildTrigger', -'BuiltImage', -'CancelBuildRequest', -'CloudBuildClient', -'CreateBuildRequest', -'CreateBuildTriggerRequest', -'CreateWorkerPoolOperationMetadata', -'CreateWorkerPoolRequest', -'DeleteBuildTriggerRequest', -'DeleteWorkerPoolOperationMetadata', -'DeleteWorkerPoolRequest', -'FileHashes', -'GetBuildRequest', -'GetBuildTriggerRequest', -'GetWorkerPoolRequest', -'GitHubEventsConfig', -'GitSource', -'Hash', -'InlineSecret', -'ListBuildTriggersRequest', 
-'ListBuildTriggersResponse', -'ListBuildsRequest', -'ListBuildsResponse', -'ListWorkerPoolsRequest', -'ListWorkerPoolsResponse', -'PrivatePoolV1Config', -'PubsubConfig', -'PullRequestFilter', -'PushFilter', -'ReceiveTriggerWebhookRequest', -'ReceiveTriggerWebhookResponse', -'RepoSource', -'RepositoryEventConfig', -'Results', -'RetryBuildRequest', -'RunBuildTriggerRequest', -'Secret', -'SecretManagerSecret', -'Secrets', -'Source', -'SourceProvenance', -'StorageSource', -'StorageSourceManifest', -'TimeSpan', -'UpdateBuildTriggerRequest', -'UpdateWorkerPoolOperationMetadata', -'UpdateWorkerPoolRequest', -'UploadedMavenArtifact', -'UploadedNpmPackage', -'UploadedPythonPackage', -'Volume', -'WebhookConfig', -'WorkerPool', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json deleted file mode 100644 index 2648fd24..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json +++ /dev/null @@ -1,298 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.devtools.cloudbuild_v1", - "protoPackage": "google.devtools.cloudbuild.v1", - "schema": "1.0", - "services": { - "CloudBuild": { - "clients": { - "grpc": { - "libraryClient": "CloudBuildClient", - "rpcs": { - "ApproveBuild": { - "methods": [ - "approve_build" - ] - }, - "CancelBuild": { - "methods": [ - "cancel_build" - ] - }, - "CreateBuild": { - "methods": [ - "create_build" - ] - }, - "CreateBuildTrigger": { - "methods": [ - "create_build_trigger" - ] - }, - "CreateWorkerPool": { - "methods": [ - "create_worker_pool" - ] - }, - "DeleteBuildTrigger": { - "methods": [ - "delete_build_trigger" - ] - }, - "DeleteWorkerPool": { - "methods": [ - "delete_worker_pool" - ] - }, - "GetBuild": { - "methods": [ - "get_build" - ] - }, - "GetBuildTrigger": { - "methods": [ - 
"get_build_trigger" - ] - }, - "GetWorkerPool": { - "methods": [ - "get_worker_pool" - ] - }, - "ListBuildTriggers": { - "methods": [ - "list_build_triggers" - ] - }, - "ListBuilds": { - "methods": [ - "list_builds" - ] - }, - "ListWorkerPools": { - "methods": [ - "list_worker_pools" - ] - }, - "ReceiveTriggerWebhook": { - "methods": [ - "receive_trigger_webhook" - ] - }, - "RetryBuild": { - "methods": [ - "retry_build" - ] - }, - "RunBuildTrigger": { - "methods": [ - "run_build_trigger" - ] - }, - "UpdateBuildTrigger": { - "methods": [ - "update_build_trigger" - ] - }, - "UpdateWorkerPool": { - "methods": [ - "update_worker_pool" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudBuildAsyncClient", - "rpcs": { - "ApproveBuild": { - "methods": [ - "approve_build" - ] - }, - "CancelBuild": { - "methods": [ - "cancel_build" - ] - }, - "CreateBuild": { - "methods": [ - "create_build" - ] - }, - "CreateBuildTrigger": { - "methods": [ - "create_build_trigger" - ] - }, - "CreateWorkerPool": { - "methods": [ - "create_worker_pool" - ] - }, - "DeleteBuildTrigger": { - "methods": [ - "delete_build_trigger" - ] - }, - "DeleteWorkerPool": { - "methods": [ - "delete_worker_pool" - ] - }, - "GetBuild": { - "methods": [ - "get_build" - ] - }, - "GetBuildTrigger": { - "methods": [ - "get_build_trigger" - ] - }, - "GetWorkerPool": { - "methods": [ - "get_worker_pool" - ] - }, - "ListBuildTriggers": { - "methods": [ - "list_build_triggers" - ] - }, - "ListBuilds": { - "methods": [ - "list_builds" - ] - }, - "ListWorkerPools": { - "methods": [ - "list_worker_pools" - ] - }, - "ReceiveTriggerWebhook": { - "methods": [ - "receive_trigger_webhook" - ] - }, - "RetryBuild": { - "methods": [ - "retry_build" - ] - }, - "RunBuildTrigger": { - "methods": [ - "run_build_trigger" - ] - }, - "UpdateBuildTrigger": { - "methods": [ - "update_build_trigger" - ] - }, - "UpdateWorkerPool": { - "methods": [ - "update_worker_pool" - ] - } - } - }, - "rest": { - "libraryClient": 
"CloudBuildClient", - "rpcs": { - "ApproveBuild": { - "methods": [ - "approve_build" - ] - }, - "CancelBuild": { - "methods": [ - "cancel_build" - ] - }, - "CreateBuild": { - "methods": [ - "create_build" - ] - }, - "CreateBuildTrigger": { - "methods": [ - "create_build_trigger" - ] - }, - "CreateWorkerPool": { - "methods": [ - "create_worker_pool" - ] - }, - "DeleteBuildTrigger": { - "methods": [ - "delete_build_trigger" - ] - }, - "DeleteWorkerPool": { - "methods": [ - "delete_worker_pool" - ] - }, - "GetBuild": { - "methods": [ - "get_build" - ] - }, - "GetBuildTrigger": { - "methods": [ - "get_build_trigger" - ] - }, - "GetWorkerPool": { - "methods": [ - "get_worker_pool" - ] - }, - "ListBuildTriggers": { - "methods": [ - "list_build_triggers" - ] - }, - "ListBuilds": { - "methods": [ - "list_builds" - ] - }, - "ListWorkerPools": { - "methods": [ - "list_worker_pools" - ] - }, - "ReceiveTriggerWebhook": { - "methods": [ - "receive_trigger_webhook" - ] - }, - "RetryBuild": { - "methods": [ - "retry_build" - ] - }, - "RunBuildTrigger": { - "methods": [ - "run_build_trigger" - ] - }, - "UpdateBuildTrigger": { - "methods": [ - "update_build_trigger" - ] - }, - "UpdateWorkerPool": { - "methods": [ - "update_worker_pool" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed deleted file mode 100644 index 6070c14c..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py deleted file mode 100644 index 89a37dc9..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py deleted file mode 100644 index b796e7cd..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import CloudBuildClient -from .async_client import CloudBuildAsyncClient - -__all__ = ( - 'CloudBuildClient', - 'CloudBuildAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py deleted file mode 100644 index 00eb8e04..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ /dev/null @@ -1,2601 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .client import CloudBuildClient - - -class CloudBuildAsyncClient: - """Creates and manages builds on Google Cloud Platform. 
- - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - """ - - _client: CloudBuildClient - - DEFAULT_ENDPOINT = CloudBuildClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CloudBuildClient.DEFAULT_MTLS_ENDPOINT - - build_path = staticmethod(CloudBuildClient.build_path) - parse_build_path = staticmethod(CloudBuildClient.parse_build_path) - build_trigger_path = staticmethod(CloudBuildClient.build_trigger_path) - parse_build_trigger_path = staticmethod(CloudBuildClient.parse_build_trigger_path) - crypto_key_path = staticmethod(CloudBuildClient.crypto_key_path) - parse_crypto_key_path = staticmethod(CloudBuildClient.parse_crypto_key_path) - network_path = staticmethod(CloudBuildClient.network_path) - parse_network_path = staticmethod(CloudBuildClient.parse_network_path) - repository_path = staticmethod(CloudBuildClient.repository_path) - parse_repository_path = staticmethod(CloudBuildClient.parse_repository_path) - secret_version_path = staticmethod(CloudBuildClient.secret_version_path) - parse_secret_version_path = staticmethod(CloudBuildClient.parse_secret_version_path) - service_account_path = staticmethod(CloudBuildClient.service_account_path) - parse_service_account_path = staticmethod(CloudBuildClient.parse_service_account_path) - subscription_path = staticmethod(CloudBuildClient.subscription_path) - parse_subscription_path = staticmethod(CloudBuildClient.parse_subscription_path) - topic_path = staticmethod(CloudBuildClient.topic_path) - parse_topic_path = staticmethod(CloudBuildClient.parse_topic_path) - worker_pool_path = staticmethod(CloudBuildClient.worker_pool_path) - parse_worker_pool_path = staticmethod(CloudBuildClient.parse_worker_pool_path) - common_billing_account_path = 
staticmethod(CloudBuildClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CloudBuildClient.parse_common_billing_account_path) - common_folder_path = staticmethod(CloudBuildClient.common_folder_path) - parse_common_folder_path = staticmethod(CloudBuildClient.parse_common_folder_path) - common_organization_path = staticmethod(CloudBuildClient.common_organization_path) - parse_common_organization_path = staticmethod(CloudBuildClient.parse_common_organization_path) - common_project_path = staticmethod(CloudBuildClient.common_project_path) - parse_common_project_path = staticmethod(CloudBuildClient.parse_common_project_path) - common_location_path = staticmethod(CloudBuildClient.common_location_path) - parse_common_location_path = staticmethod(CloudBuildClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildAsyncClient: The constructed client. - """ - return CloudBuildClient.from_service_account_info.__func__(CloudBuildAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildAsyncClient: The constructed client. 
- """ - return CloudBuildClient.from_service_account_file.__func__(CloudBuildAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return CloudBuildClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> CloudBuildTransport: - """Returns the transport used by the client instance. - - Returns: - CloudBuildTransport: The transport used by the client instance. 
- """ - return self._client.transport - - get_transport_class = functools.partial(type(CloudBuildClient).get_transport_class, type(CloudBuildClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudBuildTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud build client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.CloudBuildTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. 
- - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CloudBuildClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_build(self, - request: Optional[Union[cloudbuild.CreateBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - build: Optional[cloudbuild.Build] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts a build with the specified configuration. - - This method returns a long-running ``Operation``, which includes - the build ID. Pass the build ID to ``GetBuild`` to determine the - build status (such as ``SUCCESS`` or ``FAILURE``). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_create_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateBuildRequest( - project_id="project_id_value", - ) - - # Make the request - operation = client.create_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest, dict]]): - The request object. Request to create a new build. 
- project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - build (:class:`google.cloud.devtools.cloudbuild_v1.types.Build`): - Required. Build resource to create. - This corresponds to the ``build`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, build]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.CreateBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if build is not None: - request.build = build - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_build, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - async def get_build(self, - request: Optional[Union[cloudbuild.GetBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: - r"""Returns information about a previously requested build. - - The ``Build`` that is returned includes its status (such as - ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing - information. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_get_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = await client.get_build(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest, dict]]): - The request object. Request to get a build. - project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (:class:`str`): - Required. ID of the build. - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.Build: - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. 
- - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.GetBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_build, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("id", request.id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_builds(self, - request: Optional[Union[cloudbuild.ListBuildsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildsAsyncPager: - r"""Lists previously requested builds. - Previously requested builds may still be in-progress, or - may have finished successfully or unsuccessfully. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_list_builds(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildsRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_builds(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest, dict]]): - The request object. Request to list builds. - project_id (:class:`str`): - Required. ID of the project. 
- This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (:class:`str`): - The raw filter text to constrain the - results. - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsAsyncPager: - Response including listed builds. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, filter]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.ListBuildsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_builds, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBuildsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def cancel_build(self, - request: Optional[Union[cloudbuild.CancelBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: - r"""Cancels a build in progress. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_cancel_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CancelBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = await client.cancel_build(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest, dict]]): - The request object. Request to cancel an ongoing build. - project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (:class:`str`): - Required. ID of the build. - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.Build: - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. 
- - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.CancelBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_build, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("id", request.id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def retry_build(self, - request: Optional[Union[cloudbuild.RetryBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new build based on the specified build. - - This method creates a new build using the original build - request, which may or may not result in an identical build. - - For triggered builds: - - - Triggered builds resolve to a precise revision; therefore a - retry of a triggered build will result in a build that uses - the same revision. - - For non-triggered builds that specify ``RepoSource``: - - - If the original build built from the tip of a branch, the - retried build will build from the tip of that branch, which - may not be the same revision as the original build. - - If the original build specified a commit sha or revision ID, - the retried build will use the identical source. - - For builds that specify ``StorageSource``: - - - If the original build pulled source from Cloud Storage - without specifying the generation of the object, the new - build will use the current object, which may be different - from the original build source. - - If the original build pulled source from Cloud Storage and - specified the generation of the object, the new build will - attempt to use the same object, which may or may not be - available depending on the bucket's lifecycle management - settings. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_retry_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RetryBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - operation = client.retry_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest, dict]]): - The request object. Specifies a build to retry. - project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (:class:`str`): - Required. Build ID of the original - build. - - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. 
- - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.RetryBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.retry_build, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("id", request.id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - async def approve_build(self, - request: Optional[Union[cloudbuild.ApproveBuildRequest, dict]] = None, - *, - name: Optional[str] = None, - approval_result: Optional[cloudbuild.ApprovalResult] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. - - If rejected, the returned LRO will be immediately done. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_approve_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ApproveBuildRequest( - name="name_value", - ) - - # Make the request - operation = client.approve_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest, dict]]): - The request object. Request to approve or reject a - pending build. - name (:class:`str`): - Required. Name of the target build. 
For example: - "projects/{$project_id}/builds/{$build_id}" - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - approval_result (:class:`google.cloud.devtools.cloudbuild_v1.types.ApprovalResult`): - Approval decision and metadata. - This corresponds to the ``approval_result`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, approval_result]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.ApproveBuildRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if approval_result is not None: - request.approval_result = approval_result - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.approve_build, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - async def create_build_trigger(self, - request: Optional[Union[cloudbuild.CreateBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger: Optional[cloudbuild.BuildTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Creates a new ``BuildTrigger``. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_create_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.CreateBuildTriggerRequest( - project_id="project_id_value", - trigger=trigger, - ) - - # Make the request - response = await client.create_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest, dict]]): - The request object. Request to create a new ``BuildTrigger``. - project_id (:class:`str`): - Required. ID of the project for which - to configure automatic builds. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger (:class:`google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`): - Required. ``BuildTrigger`` to create. - This corresponds to the ``trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.CreateBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger is not None: - request.trigger = trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_build_trigger, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_build_trigger(self, - request: Optional[Union[cloudbuild.GetBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Returns information about a ``BuildTrigger``. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_get_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - response = await client.get_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest, dict]]): - The request object. Returns the ``BuildTrigger`` with the specified ID. - project_id (:class:`str`): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (:class:`str`): - Required. Identifier (``id`` or ``name``) of the - ``BuildTrigger`` to get. - - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, trigger_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.GetBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_build_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_build_triggers(self, - request: Optional[Union[cloudbuild.ListBuildTriggersRequest, dict]] = None, - *, - project_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildTriggersAsyncPager: - r"""Lists existing ``BuildTrigger``\ s. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_list_build_triggers(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildTriggersRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_build_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest, dict]]): - The request object. Request to list existing ``BuildTriggers``. - project_id (:class:`str`): - Required. ID of the project for which - to list BuildTriggers. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersAsyncPager: - Response containing existing BuildTriggers. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.ListBuildTriggersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_build_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBuildTriggersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_build_trigger(self, - request: Optional[Union[cloudbuild.DeleteBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_delete_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - await client.delete_build_trigger(request=request) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest, dict]]): - The request object. Request to delete a ``BuildTrigger``. - project_id (:class:`str`): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (:class:`str`): - Required. ID of the ``BuildTrigger`` to delete. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, trigger_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.DeleteBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_build_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_build_trigger(self, - request: Optional[Union[cloudbuild.UpdateBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - trigger: Optional[cloudbuild.BuildTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_update_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.UpdateBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=trigger, - ) - - # Make the request - response = await client.update_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest, dict]]): - The request object. Request to update an existing ``BuildTrigger``. - project_id (:class:`str`): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (:class:`str`): - Required. ID of the ``BuildTrigger`` to update. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger (:class:`google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`): - Required. ``BuildTrigger`` to update. - This corresponds to the ``trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id, trigger]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.UpdateBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if trigger is not None: - request.trigger = trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_build_trigger, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def run_build_trigger(self, - request: Optional[Union[cloudbuild.RunBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - source: Optional[cloudbuild.RepoSource] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Runs a ``BuildTrigger`` at a particular source revision. - - To run a regional or global trigger, use the POST request that - includes the location endpoint in the path (ex. - v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). - The POST request that does not include the location endpoint in - the path can only be used when running global triggers. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_run_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RunBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - operation = client.run_build_trigger(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest, dict]]): - The request object. Specifies a build trigger to run and - the source to use. 
- project_id (:class:`str`): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (:class:`str`): - Required. ID of the trigger. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source (:class:`google.cloud.devtools.cloudbuild_v1.types.RepoSource`): - Source to build against this trigger. - Branch and tag names cannot consist of - regular expressions. - - This corresponds to the ``source`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. 
- - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id, source]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.RunBuildTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if source is not None: - request.source = source - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_build_trigger, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. 
- return response - - async def receive_trigger_webhook(self, - request: Optional[Union[cloudbuild.ReceiveTriggerWebhookRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.ReceiveTriggerWebhookResponse: - r"""ReceiveTriggerWebhook [Experimental] is called when the API - receives a webhook request targeted at a specific trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_receive_trigger_webhook(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ReceiveTriggerWebhookRequest( - ) - - # Make the request - response = await client.receive_trigger_webhook(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest, dict]]): - The request object. ReceiveTriggerWebhookRequest [Experimental] is the - request object accepted by the ReceiveTriggerWebhook - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse: - ReceiveTriggerWebhookResponse [Experimental] is the response object for the - ReceiveTriggerWebhook method. - - """ - # Create or coerce a protobuf request object. - request = cloudbuild.ReceiveTriggerWebhookRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.receive_trigger_webhook, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger", request.trigger), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_worker_pool(self, - request: Optional[Union[cloudbuild.CreateWorkerPoolRequest, dict]] = None, - *, - parent: Optional[str] = None, - worker_pool: Optional[cloudbuild.WorkerPool] = None, - worker_pool_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_create_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateWorkerPoolRequest( - parent="parent_value", - worker_pool_id="worker_pool_id_value", - ) - - # Make the request - operation = client.create_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest, dict]]): - The request object. Request to create a new ``WorkerPool``. - parent (:class:`str`): - Required. The parent resource where this worker pool - will be created. Format: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - worker_pool (:class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool`): - Required. ``WorkerPool`` resource to create. - This corresponds to the ``worker_pool`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - worker_pool_id (:class:`str`): - Required. Immutable. The ID to use for the - ``WorkerPool``, which will become the final component of - the resource name. - - This value should be 1-63 characters, and valid - characters are /[a-z][0-9]-/. - - This corresponds to the ``worker_pool_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, worker_pool, worker_pool_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.CreateWorkerPoolRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if worker_pool is not None: - request.worker_pool = worker_pool - if worker_pool_id is not None: - request.worker_pool_id = worker_pool_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_worker_pool, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.WorkerPool, - metadata_type=cloudbuild.CreateWorkerPoolOperationMetadata, - ) - - # Done; return the response. - return response - - async def get_worker_pool(self, - request: Optional[Union[cloudbuild.GetWorkerPoolRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: - r"""Returns details of a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_get_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetWorkerPoolRequest( - name="name_value", - ) - - # Make the request - response = await client.get_worker_pool(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest, dict]]): - The request object. Request to get a ``WorkerPool`` with the specified name. - name (:class:`str`): - Required. The name of the ``WorkerPool`` to retrieve. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.WorkerPool: - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. 
For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.GetWorkerPoolRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_worker_pool, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_worker_pool(self, - request: Optional[Union[cloudbuild.DeleteWorkerPoolRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a ``WorkerPool``. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_delete_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteWorkerPoolRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest, dict]]): - The request object. Request to delete a ``WorkerPool``. - name (:class:`str`): - Required. The name of the ``WorkerPool`` to delete. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. 
A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.DeleteWorkerPoolRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_worker_pool, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.DeleteWorkerPoolOperationMetadata, - ) - - # Done; return the response. 
- return response - - async def update_worker_pool(self, - request: Optional[Union[cloudbuild.UpdateWorkerPoolRequest, dict]] = None, - *, - worker_pool: Optional[cloudbuild.WorkerPool] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_update_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.UpdateWorkerPoolRequest( - ) - - # Make the request - operation = client.update_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest, dict]]): - The request object. Request to update a ``WorkerPool``. - worker_pool (:class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool`): - Required. The ``WorkerPool`` to update. - - The ``name`` field is used to identify the - ``WorkerPool`` to update. Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``worker_pool`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - A mask specifying which fields in ``worker_pool`` to - update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([worker_pool, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.UpdateWorkerPoolRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if worker_pool is not None: - request.worker_pool = worker_pool - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_worker_pool, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("worker_pool.name", request.worker_pool.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloudbuild.WorkerPool, - metadata_type=cloudbuild.UpdateWorkerPoolOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_worker_pools(self, - request: Optional[Union[cloudbuild.ListWorkerPoolsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListWorkerPoolsAsyncPager: - r"""Lists ``WorkerPool``\ s. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - async def sample_list_worker_pools(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListWorkerPoolsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_worker_pools(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest, dict]]): - The request object. Request to list ``WorkerPool``\ s. - parent (:class:`str`): - Required. The parent of the collection of - ``WorkerPools``. Format: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsAsyncPager: - Response containing existing WorkerPools. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudbuild.ListWorkerPoolsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_worker_pools, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListWorkerPoolsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "CloudBuildAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "CloudBuildAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py deleted file mode 100644 index 927380bf..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ /dev/null @@ -1,2899 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import CloudBuildGrpcTransport -from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .transports.rest import CloudBuildRestTransport - - -class CloudBuildClientMeta(type): - """Metaclass for the CloudBuild client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] - _transport_registry["grpc"] = CloudBuildGrpcTransport - _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport - _transport_registry["rest"] = CloudBuildRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CloudBuildTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CloudBuildClient(metaclass=CloudBuildClientMeta): - """Creates and manages builds on Google Cloud Platform. - - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudBuildTransport: - """Returns the transport used by the client instance. - - Returns: - CloudBuildTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def build_path(project: str,build: str,) -> str: - """Returns a fully-qualified build string.""" - return "projects/{project}/builds/{build}".format(project=project, build=build, ) - - @staticmethod - def parse_build_path(path: str) -> Dict[str,str]: - """Parses a build path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/builds/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def build_trigger_path(project: str,trigger: str,) -> str: - """Returns a fully-qualified build_trigger string.""" - return "projects/{project}/triggers/{trigger}".format(project=project, trigger=trigger, ) - - @staticmethod - def parse_build_trigger_path(path: str) -> Dict[str,str]: - """Parses a build_trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/triggers/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def crypto_key_path(project: str,location: str,keyring: str,key: str,) -> str: - """Returns a fully-qualified crypto_key string.""" - return "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format(project=project, location=location, keyring=keyring, key=key, ) - - @staticmethod - def parse_crypto_key_path(path: str) -> Dict[str,str]: - """Parses a crypto_key path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def network_path(project: str,network: str,) -> str: - """Returns a fully-qualified network string.""" - return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) - - @staticmethod - def parse_network_path(path: str) -> Dict[str,str]: - """Parses a network path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def 
repository_path(project: str,location: str,connection: str,repository: str,) -> str: - """Returns a fully-qualified repository string.""" - return "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) - - @staticmethod - def parse_repository_path(path: str) -> Dict[str,str]: - """Parses a repository path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)/repositories/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def secret_version_path(project: str,secret: str,version: str,) -> str: - """Returns a fully-qualified secret_version string.""" - return "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) - - @staticmethod - def parse_secret_version_path(path: str) -> Dict[str,str]: - """Parses a secret_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def service_account_path(project: str,service_account: str,) -> str: - """Returns a fully-qualified service_account string.""" - return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) - - @staticmethod - def parse_service_account_path(path: str) -> Dict[str,str]: - """Parses a service_account path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def subscription_path(project: str,subscription: str,) -> str: - """Returns a fully-qualified subscription string.""" - return "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) - - @staticmethod - def parse_subscription_path(path: str) -> Dict[str,str]: - """Parses a 
subscription path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def topic_path(project: str,topic: str,) -> str: - """Returns a fully-qualified topic string.""" - return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) - - @staticmethod - def parse_topic_path(path: str) -> Dict[str,str]: - """Parses a topic path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def worker_pool_path(project: str,location: str,worker_pool: str,) -> str: - """Returns a fully-qualified worker_pool string.""" - return "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(project=project, location=location, worker_pool=worker_pool, ) - - @staticmethod - def parse_worker_pool_path(path: str) -> Dict[str,str]: - """Parses a worker_pool path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/workerPools/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - 
def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudBuildTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud build client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, CloudBuildTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudBuildTransport): - # transport is a CloudBuildTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_build(self, - request: Optional[Union[cloudbuild.CreateBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - build: Optional[cloudbuild.Build] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Starts a build with the specified configuration. - - This method returns a long-running ``Operation``, which includes - the build ID. Pass the build ID to ``GetBuild`` to determine the - build status (such as ``SUCCESS`` or ``FAILURE``). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_create_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateBuildRequest( - project_id="project_id_value", - ) - - # Make the request - operation = client.create_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest, dict]): - The request object. Request to create a new build. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - build (google.cloud.devtools.cloudbuild_v1.types.Build): - Required. Build resource to create. - This corresponds to the ``build`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. 
- - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, build]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.CreateBuildRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.CreateBuildRequest): - request = cloudbuild.CreateBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if build is not None: - request.build = build - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.create_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - def get_build(self, - request: Optional[Union[cloudbuild.GetBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: - r"""Returns information about a previously requested build. - - The ``Build`` that is returned includes its status (such as - ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing - information. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_get_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = client.get_build(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest, dict]): - The request object. Request to get a build. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (str): - Required. ID of the build. - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.Build: - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. 
- - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.GetBuildRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.GetBuildRequest): - request = cloudbuild.GetBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
- return response - - def list_builds(self, - request: Optional[Union[cloudbuild.ListBuildsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildsPager: - r"""Lists previously requested builds. - Previously requested builds may still be in-progress, or - may have finished successfully or unsuccessfully. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_list_builds(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildsRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_builds(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest, dict]): - The request object. Request to list builds. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - The raw filter text to constrain the - results. - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsPager: - Response including listed builds. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, filter]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ListBuildsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ListBuildsRequest): - request = cloudbuild.ListBuildsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_builds] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request.
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBuildsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def cancel_build(self, - request: Optional[Union[cloudbuild.CancelBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: - r"""Cancels a build in progress. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_cancel_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CancelBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = client.cancel_build(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest, dict]): - The request object. Request to cancel an ongoing build. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (str): - Required. ID of the build. 
- This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.Build: - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.CancelBuildRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, cloudbuild.CancelBuildRequest): - request = cloudbuild.CancelBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def retry_build(self, - request: Optional[Union[cloudbuild.RetryBuildRequest, dict]] = None, - *, - project_id: Optional[str] = None, - id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new build based on the specified build. - - This method creates a new build using the original build - request, which may or may not result in an identical build. - - For triggered builds: - - - Triggered builds resolve to a precise revision; therefore a - retry of a triggered build will result in a build that uses - the same revision. - - For non-triggered builds that specify ``RepoSource``: - - - If the original build built from the tip of a branch, the - retried build will build from the tip of that branch, which - may not be the same revision as the original build.
- - If the original build specified a commit sha or revision ID, - the retried build will use the identical source. - - For builds that specify ``StorageSource``: - - - If the original build pulled source from Cloud Storage - without specifying the generation of the object, the new - build will use the current object, which may be different - from the original build source. - - If the original build pulled source from Cloud Storage and - specified the generation of the object, the new build will - attempt to use the same object, which may or may not be - available depending on the bucket's lifecycle management - settings. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_retry_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RetryBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - operation = client.retry_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest, dict]): - The request object. Specifies a build to retry. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - id (str): - Required. Build ID of the original - build. 
- - This corresponds to the ``id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.RetryBuildRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.RetryBuildRequest): - request = cloudbuild.RetryBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.retry_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - def approve_build(self, - request: Optional[Union[cloudbuild.ApproveBuildRequest, dict]] = None, - *, - name: Optional[str] = None, - approval_result: Optional[cloudbuild.ApprovalResult] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. - - If rejected, the returned LRO will be immediately done. - - ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_approve_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ApproveBuildRequest( - name="name_value", - ) - - # Make the request - operation = client.approve_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest, dict]): - The request object. Request to approve or reject a - pending build. - name (str): - Required. Name of the target build. For example: - "projects/{$project_id}/builds/{$build_id}" - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - approval_result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): - Approval decision and metadata. - This corresponds to the ``approval_result`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, approval_result]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ApproveBuildRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ApproveBuildRequest): - request = cloudbuild.ApproveBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if approval_result is not None: - request.approval_result = approval_result - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.approve_build] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - def create_build_trigger(self, - request: Optional[Union[cloudbuild.CreateBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger: Optional[cloudbuild.BuildTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Creates a new ``BuildTrigger``. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_create_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.CreateBuildTriggerRequest( - project_id="project_id_value", - trigger=trigger, - ) - - # Make the request - response = client.create_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest, dict]): - The request object. Request to create a new ``BuildTrigger``. - project_id (str): - Required. ID of the project for which - to configure automatic builds. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): - Required. ``BuildTrigger`` to create. - This corresponds to the ``trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, trigger]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.CreateBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.CreateBuildTriggerRequest): - request = cloudbuild.CreateBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger is not None: - request.trigger = trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P<location>[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_build_trigger(self, - request: Optional[Union[cloudbuild.GetBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Returns information about a ``BuildTrigger``. - - This API is experimental. - - ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_get_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - response = client.get_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest, dict]): - The request object. Returns the ``BuildTrigger`` with the specified ID. - project_id (str): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (str): - Required. Identifier (``id`` or ``name``) of the - ``BuildTrigger`` to get. - - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.GetBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.GetBuildTriggerRequest): - request = cloudbuild.GetBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_build_triggers(self, - request: Optional[Union[cloudbuild.ListBuildTriggersRequest, dict]] = None, - *, - project_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildTriggersPager: - r"""Lists existing ``BuildTrigger``\ s. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_list_build_triggers(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildTriggersRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_build_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest, dict]): - The request object. Request to list existing ``BuildTriggers``. - project_id (str): - Required. ID of the project for which - to list BuildTriggers. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersPager: - Response containing existing BuildTriggers. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ListBuildTriggersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ListBuildTriggersRequest): - request = cloudbuild.ListBuildTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListBuildTriggersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_build_trigger(self, - request: Optional[Union[cloudbuild.DeleteBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_delete_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - client.delete_build_trigger(request=request) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest, dict]): - The request object. Request to delete a ``BuildTrigger``. - project_id (str): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (str): - Required. ID of the ``BuildTrigger`` to delete. 
- This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.DeleteBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.DeleteBuildTriggerRequest): - request = cloudbuild.DeleteBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_build_trigger(self, - request: Optional[Union[cloudbuild.UpdateBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - trigger: Optional[cloudbuild.BuildTrigger] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: - r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_update_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.UpdateBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=trigger, - ) - - # Make the request - response = client.update_build_trigger(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest, dict]): - The request object. Request to update an existing ``BuildTrigger``. - project_id (str): - Required. ID of the project that owns - the trigger. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (str): - Required. 
ID of the ``BuildTrigger`` to update. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): - Required. ``BuildTrigger`` to update. - This corresponds to the ``trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id, trigger]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.UpdateBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.UpdateBuildTriggerRequest): - request = cloudbuild.UpdateBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if trigger is not None: - request.trigger = trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') - regex_match = routing_param_regex.match(request.trigger.resource_name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_build_trigger(self, - request: Optional[Union[cloudbuild.RunBuildTriggerRequest, dict]] = None, - *, - project_id: Optional[str] = None, - trigger_id: Optional[str] = None, - source: Optional[cloudbuild.RepoSource] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Runs a ``BuildTrigger`` at a particular source revision. - - To run a regional or global trigger, use the POST request that - includes the location endpoint in the path (ex. - v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). - The POST request that does not include the location endpoint in - the path can only be used when running global triggers. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_run_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RunBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - operation = client.run_build_trigger(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest, dict]): - The request object. Specifies a build trigger to run and - the source to use. - project_id (str): - Required. ID of the project. - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - trigger_id (str): - Required. ID of the trigger. - This corresponds to the ``trigger_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - Source to build against this trigger. - Branch and tag names cannot consist of - regular expressions. - - This corresponds to the ``source`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.Build` - A build resource in the Cloud Build API. - - At a high level, a Build describes where to find - source code, how to build it (for example, the - builder image to run on the source), and where to - store the built artifacts. - - Fields can include the following variables, which - will be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified - by RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA - specified by RepoSource or resolved from the - specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, trigger_id, source]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.RunBuildTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.RunBuildTriggerRequest): - request = cloudbuild.RunBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if source is not None: - request.source = source - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/triggers/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.Build, - metadata_type=cloudbuild.BuildOperationMetadata, - ) - - # Done; return the response. - return response - - def receive_trigger_webhook(self, - request: Optional[Union[cloudbuild.ReceiveTriggerWebhookRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.ReceiveTriggerWebhookResponse: - r"""ReceiveTriggerWebhook [Experimental] is called when the API - receives a webhook request targeted at a specific trigger. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_receive_trigger_webhook(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ReceiveTriggerWebhookRequest( - ) - - # Make the request - response = client.receive_trigger_webhook(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest, dict]): - The request object. ReceiveTriggerWebhookRequest [Experimental] is the - request object accepted by the ReceiveTriggerWebhook - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse: - ReceiveTriggerWebhookResponse [Experimental] is the response object for the - ReceiveTriggerWebhook method. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ReceiveTriggerWebhookRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ReceiveTriggerWebhookRequest): - request = cloudbuild.ReceiveTriggerWebhookRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.receive_trigger_webhook] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("trigger", request.trigger), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_worker_pool(self, - request: Optional[Union[cloudbuild.CreateWorkerPoolRequest, dict]] = None, - *, - parent: Optional[str] = None, - worker_pool: Optional[cloudbuild.WorkerPool] = None, - worker_pool_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_create_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateWorkerPoolRequest( - parent="parent_value", - worker_pool_id="worker_pool_id_value", - ) - - # Make the request - operation = client.create_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest, dict]): - The request object. Request to create a new ``WorkerPool``. - parent (str): - Required. The parent resource where this worker pool - will be created. 
Format: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): - Required. ``WorkerPool`` resource to create. - This corresponds to the ``worker_pool`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - worker_pool_id (str): - Required. Immutable. The ID to use for the - ``WorkerPool``, which will become the final component of - the resource name. - - This value should be 1-63 characters, and valid - characters are /[a-z][0-9]-/. - - This corresponds to the ``worker_pool_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, worker_pool, worker_pool_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.CreateWorkerPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.CreateWorkerPoolRequest): - request = cloudbuild.CreateWorkerPoolRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if worker_pool is not None: - request.worker_pool = worker_pool - if worker_pool_id is not None: - request.worker_pool_id = worker_pool_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.WorkerPool, - metadata_type=cloudbuild.CreateWorkerPoolOperationMetadata, - ) - - # Done; return the response. 
- return response - - def get_worker_pool(self, - request: Optional[Union[cloudbuild.GetWorkerPoolRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: - r"""Returns details of a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_get_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetWorkerPoolRequest( - name="name_value", - ) - - # Make the request - response = client.get_worker_pool(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest, dict]): - The request object. Request to get a ``WorkerPool`` with the specified name. - name (str): - Required. The name of the ``WorkerPool`` to retrieve. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.types.WorkerPool: - Configuration for a WorkerPool. 
- - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.GetWorkerPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.GetWorkerPoolRequest): - request = cloudbuild.GetWorkerPoolRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_worker_pool(self, - request: Optional[Union[cloudbuild.DeleteWorkerPoolRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_delete_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteWorkerPoolRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest, dict]): - The request object. Request to delete a ``WorkerPool``. - name (str): - Required. The name of the ``WorkerPool`` to delete. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.DeleteWorkerPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.DeleteWorkerPoolRequest): - request = cloudbuild.DeleteWorkerPoolRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_worker_pool] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$') - regex_match = routing_param_regex.match(request.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.DeleteWorkerPoolOperationMetadata, - ) - - # Done; return the response. 
- return response - - def update_worker_pool(self, - request: Optional[Union[cloudbuild.UpdateWorkerPoolRequest, dict]] = None, - *, - worker_pool: Optional[cloudbuild.WorkerPool] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a ``WorkerPool``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_update_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.UpdateWorkerPoolRequest( - ) - - # Make the request - operation = client.update_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest, dict]): - The request object. Request to update a ``WorkerPool``. - worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): - Required. The ``WorkerPool`` to update. - - The ``name`` field is used to identify the - ``WorkerPool`` to update. Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - - This corresponds to the ``worker_pool`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask specifying which fields in ``worker_pool`` to - update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v1.types.WorkerPool` - Configuration for a WorkerPool. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a WorkerPool to run your - builds. Private WorkerPools give your builds access - to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see - [Private pools - overview](\ https://cloud.google.com/build/docs/private-pools/private-pools-overview). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([worker_pool, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.UpdateWorkerPoolRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.UpdateWorkerPoolRequest): - request = cloudbuild.UpdateWorkerPoolRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if worker_pool is not None: - request.worker_pool = worker_pool - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)/workerPools/[^/]+$') - regex_match = routing_param_regex.match(request.worker_pool.name) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloudbuild.WorkerPool, - metadata_type=cloudbuild.UpdateWorkerPoolOperationMetadata, - ) - - # Done; return the response. - return response - - def list_worker_pools(self, - request: Optional[Union[cloudbuild.ListWorkerPoolsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListWorkerPoolsPager: - r"""Lists ``WorkerPool``\ s. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v1 - - def sample_list_worker_pools(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListWorkerPoolsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_worker_pools(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest, dict]): - The request object. Request to list ``WorkerPool``\ s. - parent (str): - Required. The parent of the collection of - ``WorkerPools``. Format: - ``projects/{project}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsPager: - Response containing existing WorkerPools. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudbuild.ListWorkerPoolsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudbuild.ListWorkerPoolsRequest): - request = cloudbuild.ListWorkerPoolsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] - - header_params = {} - - routing_param_regex = re.compile('^projects/[^/]+/locations/(?P[^/]+)$') - regex_match = routing_param_regex.match(request.parent) - if regex_match and regex_match.group("location"): - header_params["location"] = regex_match.group("location") - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListWorkerPoolsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "CloudBuildClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "CloudBuildClient", -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py deleted file mode 100644 index f255025e..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py +++ /dev/null @@ -1,381 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild - - -class ListBuildsPager: - """A pager for iterating through ``list_builds`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``builds`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBuilds`` requests and continue to iterate - through the ``builds`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudbuild.ListBuildsResponse], - request: cloudbuild.ListBuildsRequest, - response: cloudbuild.ListBuildsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListBuildsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cloudbuild.ListBuildsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cloudbuild.Build]: - for page in self.pages: - yield from page.builds - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBuildsAsyncPager: - """A pager for iterating through ``list_builds`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``builds`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBuilds`` requests and continue to iterate - through the ``builds`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudbuild.ListBuildsResponse]], - request: cloudbuild.ListBuildsRequest, - response: cloudbuild.ListBuildsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListBuildsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListBuildsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cloudbuild.ListBuildsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[cloudbuild.Build]: - async def async_generator(): - async for page in self.pages: - for response in page.builds: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBuildTriggersPager: - """A pager for iterating through ``list_build_triggers`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``triggers`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBuildTriggers`` requests and continue to iterate - through the ``triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudbuild.ListBuildTriggersResponse], - request: cloudbuild.ListBuildTriggersRequest, - response: cloudbuild.ListBuildTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = cloudbuild.ListBuildTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cloudbuild.ListBuildTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cloudbuild.BuildTrigger]: - for page in self.pages: - yield from page.triggers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBuildTriggersAsyncPager: - """A pager for iterating through ``list_build_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``triggers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBuildTriggers`` requests and continue to iterate - through the ``triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudbuild.ListBuildTriggersResponse]], - request: cloudbuild.ListBuildTriggersRequest, - response: cloudbuild.ListBuildTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListBuildTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cloudbuild.ListBuildTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[cloudbuild.BuildTrigger]: - async def async_generator(): - async for page in self.pages: - for response in page.triggers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListWorkerPoolsPager: - """A pager for iterating through ``list_worker_pools`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``worker_pools`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListWorkerPools`` requests and continue to iterate - through the ``worker_pools`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., cloudbuild.ListWorkerPoolsResponse], - request: cloudbuild.ListWorkerPoolsRequest, - response: cloudbuild.ListWorkerPoolsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListWorkerPoolsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cloudbuild.ListWorkerPoolsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cloudbuild.WorkerPool]: - for page in self.pages: - yield from page.worker_pools - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListWorkerPoolsAsyncPager: - """A pager for iterating through ``list_worker_pools`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``worker_pools`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListWorkerPools`` requests and continue to iterate - through the ``worker_pools`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudbuild.ListWorkerPoolsResponse]], - request: cloudbuild.ListWorkerPoolsRequest, - response: cloudbuild.ListWorkerPoolsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudbuild.ListWorkerPoolsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cloudbuild.ListWorkerPoolsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[cloudbuild.WorkerPool]: - async def async_generator(): - async for page in self.pages: - for response in page.worker_pools: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py 
b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py deleted file mode 100644 index d7a78973..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CloudBuildTransport -from .grpc import CloudBuildGrpcTransport -from .grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .rest import CloudBuildRestTransport -from .rest import CloudBuildRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] -_transport_registry['grpc'] = CloudBuildGrpcTransport -_transport_registry['grpc_asyncio'] = CloudBuildGrpcAsyncIOTransport -_transport_registry['rest'] = CloudBuildRestTransport - -__all__ = ( - 'CloudBuildTransport', - 'CloudBuildGrpcTransport', - 'CloudBuildGrpcAsyncIOTransport', - 'CloudBuildRestTransport', - 'CloudBuildRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py deleted file mode 100644 index cf5f61cc..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py +++ /dev/null @@ -1,443 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.devtools.cloudbuild_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class CloudBuildTransport(abc.ABC): - """Abstract transport class for CloudBuild.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudbuild.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_build: gapic_v1.method.wrap_method( - self.create_build, - default_timeout=600.0, - client_info=client_info, - ), - self.get_build: gapic_v1.method.wrap_method( - self.get_build, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.list_builds: gapic_v1.method.wrap_method( - self.list_builds, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.cancel_build: gapic_v1.method.wrap_method( - self.cancel_build, - default_timeout=600.0, - client_info=client_info, - ), - self.retry_build: gapic_v1.method.wrap_method( - self.retry_build, - default_timeout=600.0, - client_info=client_info, - ), - self.approve_build: gapic_v1.method.wrap_method( - self.approve_build, - default_timeout=None, - client_info=client_info, - ), - self.create_build_trigger: gapic_v1.method.wrap_method( - self.create_build_trigger, - default_timeout=600.0, - client_info=client_info, - ), - self.get_build_trigger: gapic_v1.method.wrap_method( - self.get_build_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.list_build_triggers: gapic_v1.method.wrap_method( - self.list_build_triggers, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.delete_build_trigger: gapic_v1.method.wrap_method( - self.delete_build_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.update_build_trigger: gapic_v1.method.wrap_method( - self.update_build_trigger, - default_timeout=600.0, - client_info=client_info, - ), - self.run_build_trigger: gapic_v1.method.wrap_method( - self.run_build_trigger, - default_timeout=600.0, - client_info=client_info, - ), - self.receive_trigger_webhook: gapic_v1.method.wrap_method( - self.receive_trigger_webhook, - default_timeout=None, - client_info=client_info, - ), - self.create_worker_pool: gapic_v1.method.wrap_method( - self.create_worker_pool, - default_timeout=600.0, - client_info=client_info, - ), - self.get_worker_pool: gapic_v1.method.wrap_method( - self.get_worker_pool, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.delete_worker_pool: gapic_v1.method.wrap_method( - self.delete_worker_pool, - default_timeout=600.0, - client_info=client_info, - ), - self.update_worker_pool: gapic_v1.method.wrap_method( - self.update_worker_pool, - default_timeout=600.0, - client_info=client_info, - ), - self.list_worker_pools: gapic_v1.method.wrap_method( - self.list_worker_pools, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - 
core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - Union[ - cloudbuild.Build, - Awaitable[cloudbuild.Build] - ]]: - raise NotImplementedError() - - @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - Union[ - cloudbuild.ListBuildsResponse, - Awaitable[cloudbuild.ListBuildsResponse] - ]]: - raise NotImplementedError() - - @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - Union[ - cloudbuild.Build, - Awaitable[cloudbuild.Build] - ]]: - raise NotImplementedError() - - @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def approve_build(self) -> Callable[ - [cloudbuild.ApproveBuildRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - Union[ - cloudbuild.BuildTrigger, - Awaitable[cloudbuild.BuildTrigger] - ]]: - raise NotImplementedError() - - @property - def get_build_trigger(self) -> Callable[ - 
[cloudbuild.GetBuildTriggerRequest], - Union[ - cloudbuild.BuildTrigger, - Awaitable[cloudbuild.BuildTrigger] - ]]: - raise NotImplementedError() - - @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - Union[ - cloudbuild.ListBuildTriggersResponse, - Awaitable[cloudbuild.ListBuildTriggersResponse] - ]]: - raise NotImplementedError() - - @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - Union[ - cloudbuild.BuildTrigger, - Awaitable[cloudbuild.BuildTrigger] - ]]: - raise NotImplementedError() - - @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def receive_trigger_webhook(self) -> Callable[ - [cloudbuild.ReceiveTriggerWebhookRequest], - Union[ - cloudbuild.ReceiveTriggerWebhookResponse, - Awaitable[cloudbuild.ReceiveTriggerWebhookResponse] - ]]: - raise NotImplementedError() - - @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - Union[ - cloudbuild.WorkerPool, - Awaitable[cloudbuild.WorkerPool] - ]]: - raise NotImplementedError() - - @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - Union[ - operations_pb2.Operation, - 
Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - Union[ - cloudbuild.ListWorkerPoolsResponse, - Awaitable[cloudbuild.ListWorkerPoolsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'CloudBuildTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py deleted file mode 100644 index 08b246b2..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ /dev/null @@ -1,793 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO - - -class CloudBuildGrpcTransport(CloudBuildTransport): - """gRPC backend transport for CloudBuild. - - Creates and manages builds on Google Cloud Platform. - - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. 
- self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. 
- return self._operations_client - - @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - operations_pb2.Operation]: - r"""Return a callable for the create build method over gRPC. - - Starts a build with the specified configuration. - - This method returns a long-running ``Operation``, which includes - the build ID. Pass the build ID to ``GetBuild`` to determine the - build status (such as ``SUCCESS`` or ``FAILURE``). - - Returns: - Callable[[~.CreateBuildRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_build' not in self._stubs: - self._stubs['create_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', - request_serializer=cloudbuild.CreateBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_build'] - - @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - cloudbuild.Build]: - r"""Return a callable for the get build method over gRPC. - - Returns information about a previously requested build. - - The ``Build`` that is returned includes its status (such as - ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing - information. - - Returns: - Callable[[~.GetBuildRequest], - ~.Build]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_build' not in self._stubs: - self._stubs['get_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', - request_serializer=cloudbuild.GetBuildRequest.serialize, - response_deserializer=cloudbuild.Build.deserialize, - ) - return self._stubs['get_build'] - - @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - cloudbuild.ListBuildsResponse]: - r"""Return a callable for the list builds method over gRPC. - - Lists previously requested builds. - Previously requested builds may still be in-progress, or - may have finished successfully or unsuccessfully. - - Returns: - Callable[[~.ListBuildsRequest], - ~.ListBuildsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_builds' not in self._stubs: - self._stubs['list_builds'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', - request_serializer=cloudbuild.ListBuildsRequest.serialize, - response_deserializer=cloudbuild.ListBuildsResponse.deserialize, - ) - return self._stubs['list_builds'] - - @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - cloudbuild.Build]: - r"""Return a callable for the cancel build method over gRPC. - - Cancels a build in progress. - - Returns: - Callable[[~.CancelBuildRequest], - ~.Build]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'cancel_build' not in self._stubs: - self._stubs['cancel_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', - request_serializer=cloudbuild.CancelBuildRequest.serialize, - response_deserializer=cloudbuild.Build.deserialize, - ) - return self._stubs['cancel_build'] - - @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - operations_pb2.Operation]: - r"""Return a callable for the retry build method over gRPC. - - Creates a new build based on the specified build. - - This method creates a new build using the original build - request, which may or may not result in an identical build. - - For triggered builds: - - - Triggered builds resolve to a precise revision; therefore a - retry of a triggered build will result in a build that uses - the same revision. - - For non-triggered builds that specify ``RepoSource``: - - - If the original build built from the tip of a branch, the - retried build will build from the tip of that branch, which - may not be the same revision as the original build. - - If the original build specified a commit sha or revision ID, - the retried build will use the identical source. - - For builds that specify ``StorageSource``: - - - If the original build pulled source from Cloud Storage - without specifying the generation of the object, the new - build will use the current object, which may be different - from the original build source. - - If the original build pulled source from Cloud Storage and - specified the generation of the object, the new build will - attempt to use the same object, which may or may not be - available depending on the bucket's lifecycle management - settings. - - Returns: - Callable[[~.RetryBuildRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'retry_build' not in self._stubs: - self._stubs['retry_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', - request_serializer=cloudbuild.RetryBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['retry_build'] - - @property - def approve_build(self) -> Callable[ - [cloudbuild.ApproveBuildRequest], - operations_pb2.Operation]: - r"""Return a callable for the approve build method over gRPC. - - Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. - - If rejected, the returned LRO will be immediately done. - - Returns: - Callable[[~.ApproveBuildRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'approve_build' not in self._stubs: - self._stubs['approve_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ApproveBuild', - request_serializer=cloudbuild.ApproveBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['approve_build'] - - @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - cloudbuild.BuildTrigger]: - r"""Return a callable for the create build trigger method over gRPC. - - Creates a new ``BuildTrigger``. - - This API is experimental. - - Returns: - Callable[[~.CreateBuildTriggerRequest], - ~.BuildTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_build_trigger' not in self._stubs: - self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', - request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['create_build_trigger'] - - @property - def get_build_trigger(self) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], - cloudbuild.BuildTrigger]: - r"""Return a callable for the get build trigger method over gRPC. - - Returns information about a ``BuildTrigger``. - - This API is experimental. - - Returns: - Callable[[~.GetBuildTriggerRequest], - ~.BuildTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_build_trigger' not in self._stubs: - self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', - request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['get_build_trigger'] - - @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - cloudbuild.ListBuildTriggersResponse]: - r"""Return a callable for the list build triggers method over gRPC. - - Lists existing ``BuildTrigger``\ s. - - This API is experimental. - - Returns: - Callable[[~.ListBuildTriggersRequest], - ~.ListBuildTriggersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_build_triggers' not in self._stubs: - self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', - request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, - response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, - ) - return self._stubs['list_build_triggers'] - - @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete build trigger method over gRPC. - - Deletes a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - Returns: - Callable[[~.DeleteBuildTriggerRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_build_trigger' not in self._stubs: - self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', - request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_build_trigger'] - - @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - cloudbuild.BuildTrigger]: - r"""Return a callable for the update build trigger method over gRPC. - - Updates a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - Returns: - Callable[[~.UpdateBuildTriggerRequest], - ~.BuildTrigger]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_build_trigger' not in self._stubs: - self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', - request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['update_build_trigger'] - - @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - operations_pb2.Operation]: - r"""Return a callable for the run build trigger method over gRPC. - - Runs a ``BuildTrigger`` at a particular source revision. - - To run a regional or global trigger, use the POST request that - includes the location endpoint in the path (ex. - v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). - The POST request that does not include the location endpoint in - the path can only be used when running global triggers. - - Returns: - Callable[[~.RunBuildTriggerRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'run_build_trigger' not in self._stubs: - self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', - request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['run_build_trigger'] - - @property - def receive_trigger_webhook(self) -> Callable[ - [cloudbuild.ReceiveTriggerWebhookRequest], - cloudbuild.ReceiveTriggerWebhookResponse]: - r"""Return a callable for the receive trigger webhook method over gRPC. - - ReceiveTriggerWebhook [Experimental] is called when the API - receives a webhook request targeted at a specific trigger. - - Returns: - Callable[[~.ReceiveTriggerWebhookRequest], - ~.ReceiveTriggerWebhookResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'receive_trigger_webhook' not in self._stubs: - self._stubs['receive_trigger_webhook'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ReceiveTriggerWebhook', - request_serializer=cloudbuild.ReceiveTriggerWebhookRequest.serialize, - response_deserializer=cloudbuild.ReceiveTriggerWebhookResponse.deserialize, - ) - return self._stubs['receive_trigger_webhook'] - - @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - operations_pb2.Operation]: - r"""Return a callable for the create worker pool method over gRPC. - - Creates a ``WorkerPool``. - - Returns: - Callable[[~.CreateWorkerPoolRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_worker_pool' not in self._stubs: - self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', - request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_worker_pool'] - - @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - cloudbuild.WorkerPool]: - r"""Return a callable for the get worker pool method over gRPC. - - Returns details of a ``WorkerPool``. - - Returns: - Callable[[~.GetWorkerPoolRequest], - ~.WorkerPool]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_worker_pool' not in self._stubs: - self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', - request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, - response_deserializer=cloudbuild.WorkerPool.deserialize, - ) - return self._stubs['get_worker_pool'] - - @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete worker pool method over gRPC. - - Deletes a ``WorkerPool``. - - Returns: - Callable[[~.DeleteWorkerPoolRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_worker_pool' not in self._stubs: - self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', - request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_worker_pool'] - - @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - operations_pb2.Operation]: - r"""Return a callable for the update worker pool method over gRPC. - - Updates a ``WorkerPool``. - - Returns: - Callable[[~.UpdateWorkerPoolRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_worker_pool' not in self._stubs: - self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', - request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_worker_pool'] - - @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - cloudbuild.ListWorkerPoolsResponse]: - r"""Return a callable for the list worker pools method over gRPC. - - Lists ``WorkerPool``\ s. - - Returns: - Callable[[~.ListWorkerPoolsRequest], - ~.ListWorkerPoolsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_worker_pools' not in self._stubs: - self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', - request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, - response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, - ) - return self._stubs['list_worker_pools'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'CloudBuildGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py deleted file mode 100644 index e9f45881..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ /dev/null @@ -1,792 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .grpc import CloudBuildGrpcTransport - - -class CloudBuildGrpcAsyncIOTransport(CloudBuildTransport): - """gRPC AsyncIO backend transport for CloudBuild. - - Creates and manages builds on Google Cloud Platform. - - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. 
If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. 
- """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create build method over gRPC. - - Starts a build with the specified configuration. - - This method returns a long-running ``Operation``, which includes - the build ID. Pass the build ID to ``GetBuild`` to determine the - build status (such as ``SUCCESS`` or ``FAILURE``). - - Returns: - Callable[[~.CreateBuildRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_build' not in self._stubs: - self._stubs['create_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', - request_serializer=cloudbuild.CreateBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_build'] - - @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - Awaitable[cloudbuild.Build]]: - r"""Return a callable for the get build method over gRPC. - - Returns information about a previously requested build. - - The ``Build`` that is returned includes its status (such as - ``SUCCESS``, ``FAILURE``, or ``WORKING``), and timing - information. - - Returns: - Callable[[~.GetBuildRequest], - Awaitable[~.Build]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_build' not in self._stubs: - self._stubs['get_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', - request_serializer=cloudbuild.GetBuildRequest.serialize, - response_deserializer=cloudbuild.Build.deserialize, - ) - return self._stubs['get_build'] - - @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - Awaitable[cloudbuild.ListBuildsResponse]]: - r"""Return a callable for the list builds method over gRPC. - - Lists previously requested builds. - Previously requested builds may still be in-progress, or - may have finished successfully or unsuccessfully. - - Returns: - Callable[[~.ListBuildsRequest], - Awaitable[~.ListBuildsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_builds' not in self._stubs: - self._stubs['list_builds'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', - request_serializer=cloudbuild.ListBuildsRequest.serialize, - response_deserializer=cloudbuild.ListBuildsResponse.deserialize, - ) - return self._stubs['list_builds'] - - @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - Awaitable[cloudbuild.Build]]: - r"""Return a callable for the cancel build method over gRPC. - - Cancels a build in progress. - - Returns: - Callable[[~.CancelBuildRequest], - Awaitable[~.Build]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'cancel_build' not in self._stubs: - self._stubs['cancel_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', - request_serializer=cloudbuild.CancelBuildRequest.serialize, - response_deserializer=cloudbuild.Build.deserialize, - ) - return self._stubs['cancel_build'] - - @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the retry build method over gRPC. - - Creates a new build based on the specified build. - - This method creates a new build using the original build - request, which may or may not result in an identical build. - - For triggered builds: - - - Triggered builds resolve to a precise revision; therefore a - retry of a triggered build will result in a build that uses - the same revision. - - For non-triggered builds that specify ``RepoSource``: - - - If the original build built from the tip of a branch, the - retried build will build from the tip of that branch, which - may not be the same revision as the original build. - - If the original build specified a commit sha or revision ID, - the retried build will use the identical source. - - For builds that specify ``StorageSource``: - - - If the original build pulled source from Cloud Storage - without specifying the generation of the object, the new - build will use the current object, which may be different - from the original build source. - - If the original build pulled source from Cloud Storage and - specified the generation of the object, the new build will - attempt to use the same object, which may or may not be - available depending on the bucket's lifecycle management - settings. - - Returns: - Callable[[~.RetryBuildRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'retry_build' not in self._stubs: - self._stubs['retry_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', - request_serializer=cloudbuild.RetryBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['retry_build'] - - @property - def approve_build(self) -> Callable[ - [cloudbuild.ApproveBuildRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the approve build method over gRPC. - - Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. - - If rejected, the returned LRO will be immediately done. - - Returns: - Callable[[~.ApproveBuildRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'approve_build' not in self._stubs: - self._stubs['approve_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ApproveBuild', - request_serializer=cloudbuild.ApproveBuildRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['approve_build'] - - @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: - r"""Return a callable for the create build trigger method over gRPC. - - Creates a new ``BuildTrigger``. - - This API is experimental. - - Returns: - Callable[[~.CreateBuildTriggerRequest], - Awaitable[~.BuildTrigger]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_build_trigger' not in self._stubs: - self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', - request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['create_build_trigger'] - - @property - def get_build_trigger(self) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: - r"""Return a callable for the get build trigger method over gRPC. - - Returns information about a ``BuildTrigger``. - - This API is experimental. - - Returns: - Callable[[~.GetBuildTriggerRequest], - Awaitable[~.BuildTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_build_trigger' not in self._stubs: - self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', - request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['get_build_trigger'] - - @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - Awaitable[cloudbuild.ListBuildTriggersResponse]]: - r"""Return a callable for the list build triggers method over gRPC. - - Lists existing ``BuildTrigger``\ s. - - This API is experimental. 
- - Returns: - Callable[[~.ListBuildTriggersRequest], - Awaitable[~.ListBuildTriggersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_build_triggers' not in self._stubs: - self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', - request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, - response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, - ) - return self._stubs['list_build_triggers'] - - @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete build trigger method over gRPC. - - Deletes a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - Returns: - Callable[[~.DeleteBuildTriggerRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_build_trigger' not in self._stubs: - self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', - request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_build_trigger'] - - @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: - r"""Return a callable for the update build trigger method over gRPC. 
- - Updates a ``BuildTrigger`` by its project ID and trigger ID. - - This API is experimental. - - Returns: - Callable[[~.UpdateBuildTriggerRequest], - Awaitable[~.BuildTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_build_trigger' not in self._stubs: - self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', - request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, - response_deserializer=cloudbuild.BuildTrigger.deserialize, - ) - return self._stubs['update_build_trigger'] - - @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the run build trigger method over gRPC. - - Runs a ``BuildTrigger`` at a particular source revision. - - To run a regional or global trigger, use the POST request that - includes the location endpoint in the path (ex. - v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). - The POST request that does not include the location endpoint in - the path can only be used when running global triggers. - - Returns: - Callable[[~.RunBuildTriggerRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'run_build_trigger' not in self._stubs: - self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', - request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['run_build_trigger'] - - @property - def receive_trigger_webhook(self) -> Callable[ - [cloudbuild.ReceiveTriggerWebhookRequest], - Awaitable[cloudbuild.ReceiveTriggerWebhookResponse]]: - r"""Return a callable for the receive trigger webhook method over gRPC. - - ReceiveTriggerWebhook [Experimental] is called when the API - receives a webhook request targeted at a specific trigger. - - Returns: - Callable[[~.ReceiveTriggerWebhookRequest], - Awaitable[~.ReceiveTriggerWebhookResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'receive_trigger_webhook' not in self._stubs: - self._stubs['receive_trigger_webhook'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ReceiveTriggerWebhook', - request_serializer=cloudbuild.ReceiveTriggerWebhookRequest.serialize, - response_deserializer=cloudbuild.ReceiveTriggerWebhookResponse.deserialize, - ) - return self._stubs['receive_trigger_webhook'] - - @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create worker pool method over gRPC. - - Creates a ``WorkerPool``. - - Returns: - Callable[[~.CreateWorkerPoolRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_worker_pool' not in self._stubs: - self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', - request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_worker_pool'] - - @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - Awaitable[cloudbuild.WorkerPool]]: - r"""Return a callable for the get worker pool method over gRPC. - - Returns details of a ``WorkerPool``. - - Returns: - Callable[[~.GetWorkerPoolRequest], - Awaitable[~.WorkerPool]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_worker_pool' not in self._stubs: - self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', - request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, - response_deserializer=cloudbuild.WorkerPool.deserialize, - ) - return self._stubs['get_worker_pool'] - - @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete worker pool method over gRPC. - - Deletes a ``WorkerPool``. - - Returns: - Callable[[~.DeleteWorkerPoolRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_worker_pool' not in self._stubs: - self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', - request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_worker_pool'] - - @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update worker pool method over gRPC. - - Updates a ``WorkerPool``. - - Returns: - Callable[[~.UpdateWorkerPoolRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_worker_pool' not in self._stubs: - self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', - request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_worker_pool'] - - @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - Awaitable[cloudbuild.ListWorkerPoolsResponse]]: - r"""Return a callable for the list worker pools method over gRPC. - - Lists ``WorkerPool``\ s. - - Returns: - Callable[[~.ListWorkerPoolsRequest], - Awaitable[~.ListWorkerPoolsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_worker_pools' not in self._stubs: - self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', - request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, - response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, - ) - return self._stubs['list_worker_pools'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'CloudBuildGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py deleted file mode 100644 index 8d6d8492..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py +++ /dev/null @@ -1,2419 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CloudBuildRestInterceptor: - """Interceptor for CloudBuild. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CloudBuildRestTransport. - - .. 
code-block:: python - class MyCustomCloudBuildInterceptor(CloudBuildRestInterceptor): - def pre_approve_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_approve_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_cancel_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_cancel_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_build_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_worker_pool(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_worker_pool(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_worker_pool(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_worker_pool(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return 
request, metadata - - def post_get_build_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_worker_pool(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_worker_pool(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_builds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_builds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_build_triggers(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_build_triggers(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_worker_pools(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_worker_pools(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_receive_trigger_webhook(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_receive_trigger_webhook(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_retry_build(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_retry_build(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_run_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_build_trigger(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_build_trigger(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_build_trigger(self, 
response): - logging.log(f"Received response: {response}") - return response - - def pre_update_worker_pool(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_worker_pool(self, response): - logging.log(f"Received response: {response}") - return response - - transport = CloudBuildRestTransport(interceptor=MyCustomCloudBuildInterceptor()) - client = CloudBuildClient(transport=transport) - - - """ - def pre_approve_build(self, request: cloudbuild.ApproveBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ApproveBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for approve_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_approve_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for approve_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_cancel_build(self, request: cloudbuild.CancelBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CancelBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_cancel_build(self, response: cloudbuild.Build) -> cloudbuild.Build: - """Post-rpc interceptor for cancel_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. 
- """ - return response - def pre_create_build(self, request: cloudbuild.CreateBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_create_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_create_build_trigger(self, request: cloudbuild.CreateBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_create_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: - """Post-rpc interceptor for create_build_trigger - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_create_worker_pool(self, request: cloudbuild.CreateWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.CreateWorkerPoolRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_worker_pool - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_create_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_worker_pool - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_delete_build_trigger(self, request: cloudbuild.DeleteBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.DeleteBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def pre_delete_worker_pool(self, request: cloudbuild.DeleteWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.DeleteWorkerPoolRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_worker_pool - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_delete_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_worker_pool - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_get_build(self, request: cloudbuild.GetBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_get_build(self, response: cloudbuild.Build) -> cloudbuild.Build: - """Post-rpc interceptor for get_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_get_build_trigger(self, request: cloudbuild.GetBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_get_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: - """Post-rpc interceptor for get_build_trigger - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_get_worker_pool(self, request: cloudbuild.GetWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.GetWorkerPoolRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_worker_pool - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_get_worker_pool(self, response: cloudbuild.WorkerPool) -> cloudbuild.WorkerPool: - """Post-rpc interceptor for get_worker_pool - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. 
- """ - return response - def pre_list_builds(self, request: cloudbuild.ListBuildsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListBuildsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_builds - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_list_builds(self, response: cloudbuild.ListBuildsResponse) -> cloudbuild.ListBuildsResponse: - """Post-rpc interceptor for list_builds - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_list_build_triggers(self, request: cloudbuild.ListBuildTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListBuildTriggersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_build_triggers - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_list_build_triggers(self, response: cloudbuild.ListBuildTriggersResponse) -> cloudbuild.ListBuildTriggersResponse: - """Post-rpc interceptor for list_build_triggers - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_list_worker_pools(self, request: cloudbuild.ListWorkerPoolsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ListWorkerPoolsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_worker_pools - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_list_worker_pools(self, response: cloudbuild.ListWorkerPoolsResponse) -> cloudbuild.ListWorkerPoolsResponse: - """Post-rpc interceptor for list_worker_pools - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_receive_trigger_webhook(self, request: cloudbuild.ReceiveTriggerWebhookRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.ReceiveTriggerWebhookRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for receive_trigger_webhook - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_receive_trigger_webhook(self, response: cloudbuild.ReceiveTriggerWebhookResponse) -> cloudbuild.ReceiveTriggerWebhookResponse: - """Post-rpc interceptor for receive_trigger_webhook - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_retry_build(self, request: cloudbuild.RetryBuildRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.RetryBuildRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for retry_build - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_retry_build(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for retry_build - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. 
- """ - return response - def pre_run_build_trigger(self, request: cloudbuild.RunBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.RunBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for run_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_run_build_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for run_build_trigger - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_update_build_trigger(self, request: cloudbuild.UpdateBuildTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.UpdateBuildTriggerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_build_trigger - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_update_build_trigger(self, response: cloudbuild.BuildTrigger) -> cloudbuild.BuildTrigger: - """Post-rpc interceptor for update_build_trigger - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - def pre_update_worker_pool(self, request: cloudbuild.UpdateWorkerPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloudbuild.UpdateWorkerPoolRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_worker_pool - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_update_worker_pool(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_worker_pool - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CloudBuildRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CloudBuildRestInterceptor - - -class CloudBuildRestTransport(CloudBuildTransport): - """REST backend transport for CloudBuild. - - Creates and manages builds on Google Cloud Platform. - - The main concept used by this API is a ``Build``, which describes - the location of the source to build, how to build the source, and - where to store the built artifacts, if any. - - A user can list previously-requested builds or get builds by their - ID to determine the status of the build. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[CloudBuildRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CloudBuildRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=operations/**}:cancel', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=operations/**}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _ApproveBuild(CloudBuildRestStub): - def __hash__(self): - return hash("ApproveBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.ApproveBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the approve build method over HTTP. - - Args: - request (~.cloudbuild.ApproveBuildRequest): - The request object. Request to approve or reject a - pending build. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/builds/*}:approve', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/builds/*}:approve', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_approve_build(request, metadata) - pb_request = cloudbuild.ApproveBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_approve_build(resp) - return resp - - class _CancelBuild(CloudBuildRestStub): - def __hash__(self): - return hash("CancelBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.CancelBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.Build: - r"""Call the cancel build method over HTTP. - - Args: - request (~.cloudbuild.CancelBuildRequest): - The request object. Request to cancel an ongoing build. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.Build: - A build resource in the Cloud Build API. - - At a high level, a ``Build`` describes where to find - source code, how to build it (for example, the builder - image to run on the source), and where to store the - built artifacts. - - Fields can include the following variables, which will - be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified by - RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. 
- - $REVISION_ID or $COMMIT_SHA: the commit SHA specified - by RepoSource or resolved from the specified branch - or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/builds/{id}:cancel', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/builds/*}:cancel', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_cancel_build(request, metadata) - pb_request = cloudbuild.CancelBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.Build() - pb_resp = cloudbuild.Build.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_cancel_build(resp) - return resp - - class _CreateBuild(CloudBuildRestStub): - def __hash__(self): - return hash("CreateBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.CreateBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create build method over HTTP. - - Args: - request (~.cloudbuild.CreateBuildRequest): - The request object. Request to create a new build. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/builds', - 'body': 'build', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/builds', - 'body': 'build', - }, - ] - request, metadata = self._interceptor.pre_create_build(request, metadata) - pb_request = cloudbuild.CreateBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_build(resp) - return resp - - class _CreateBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("CreateBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.CreateBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.BuildTrigger: - r"""Call the create build trigger method over HTTP. - - Args: - request (~.cloudbuild.CreateBuildTriggerRequest): - The request object. Request to create a new ``BuildTrigger``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/triggers', - 'body': 'trigger', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - 'body': 'trigger', - }, - ] - request, metadata = self._interceptor.pre_create_build_trigger(request, metadata) - pb_request = cloudbuild.CreateBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.BuildTrigger() - pb_resp = cloudbuild.BuildTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_build_trigger(resp) - return resp - - class _CreateWorkerPool(CloudBuildRestStub): - def __hash__(self): - return hash("CreateWorkerPool") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "workerPoolId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.CreateWorkerPoolRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create worker pool method over HTTP. - - Args: - request (~.cloudbuild.CreateWorkerPoolRequest): - The request object. Request to create a new ``WorkerPool``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/workerPools', - 'body': 'worker_pool', - }, - ] - request, metadata = self._interceptor.pre_create_worker_pool(request, metadata) - pb_request = cloudbuild.CreateWorkerPoolRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_worker_pool(resp) - return resp - - class _DeleteBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("DeleteBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.DeleteBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete build trigger method over HTTP. - - Args: - request (~.cloudbuild.DeleteBuildTriggerRequest): - The request object. Request to delete a ``BuildTrigger``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_build_trigger(request, metadata) - pb_request = cloudbuild.DeleteBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteWorkerPool(CloudBuildRestStub): - def __hash__(self): - return hash("DeleteWorkerPool") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.DeleteWorkerPoolRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete worker pool method over HTTP. 
- - Args: - request (~.cloudbuild.DeleteWorkerPoolRequest): - The request object. Request to delete a ``WorkerPool``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/workerPools/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_worker_pool(request, metadata) - pb_request = cloudbuild.DeleteWorkerPoolRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_worker_pool(resp) - return resp - - class _GetBuild(CloudBuildRestStub): - def __hash__(self): - return hash("GetBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.GetBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.Build: - r"""Call the get build method over HTTP. - - Args: - request (~.cloudbuild.GetBuildRequest): - The request object. Request to get a build. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.Build: - A build resource in the Cloud Build API. - - At a high level, a ``Build`` describes where to find - source code, how to build it (for example, the builder - image to run on the source), and where to store the - built artifacts. - - Fields can include the following variables, which will - be expanded when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified by - RepoSource. - - $BRANCH_NAME: the branch name specified by - RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. 
- - $REVISION_ID or $COMMIT_SHA: the commit SHA specified - by RepoSource or resolved from the specified branch - or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or - $COMMIT_SHA. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/builds/{id}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/builds/*}', - }, - ] - request, metadata = self._interceptor.pre_get_build(request, metadata) - pb_request = cloudbuild.GetBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.Build() - pb_resp = cloudbuild.Build.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_build(resp) - return resp - - class _GetBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("GetBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.GetBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.BuildTrigger: - r"""Call the get build trigger method over HTTP. - - Args: - request (~.cloudbuild.GetBuildTriggerRequest): - The request object. Returns the ``BuildTrigger`` with the specified ID. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, - ] - request, metadata = self._interceptor.pre_get_build_trigger(request, metadata) - pb_request = cloudbuild.GetBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.BuildTrigger() - pb_resp = cloudbuild.BuildTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_build_trigger(resp) - return resp - - class _GetWorkerPool(CloudBuildRestStub): - def __hash__(self): - return hash("GetWorkerPool") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.GetWorkerPoolRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.WorkerPool: - r"""Call the get worker pool method over HTTP. - - Args: - request (~.cloudbuild.GetWorkerPoolRequest): - The request object. Request to get a ``WorkerPool`` with the specified name. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.WorkerPool: - Configuration for a ``WorkerPool``. - - Cloud Build owns and maintains a pool of workers for - general use and have no access to a project's private - network. By default, builds submitted to Cloud Build - will use a worker from this pool. - - If your build needs access to resources on a private - network, create and use a ``WorkerPool`` to run your - builds. Private ``WorkerPool``\ s give your builds - access to any single VPC network that you administer, - including any on-prem resources connected to that VPC - network. For an overview of private pools, see `Private - pools - overview `__. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/workerPools/*}', - }, - ] - request, metadata = self._interceptor.pre_get_worker_pool(request, metadata) - pb_request = cloudbuild.GetWorkerPoolRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.WorkerPool() - pb_resp = cloudbuild.WorkerPool.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_worker_pool(resp) - return resp - - class _ListBuilds(CloudBuildRestStub): - def __hash__(self): - return hash("ListBuilds") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.ListBuildsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.ListBuildsResponse: - r"""Call the list builds method over HTTP. - - Args: - request (~.cloudbuild.ListBuildsRequest): - The request object. Request to list builds. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.ListBuildsResponse: - Response including listed builds. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/builds', - }, -{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/builds', - }, - ] - request, metadata = self._interceptor.pre_list_builds(request, metadata) - pb_request = cloudbuild.ListBuildsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.ListBuildsResponse() - pb_resp = cloudbuild.ListBuildsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_builds(resp) - return resp - - class _ListBuildTriggers(CloudBuildRestStub): - def __hash__(self): - return hash("ListBuildTriggers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.ListBuildTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.ListBuildTriggersResponse: - r"""Call the list build triggers method over HTTP. - - Args: - request (~.cloudbuild.ListBuildTriggersRequest): - The request object. Request to list existing ``BuildTriggers``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.ListBuildTriggersResponse: - Response containing existing ``BuildTriggers``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/triggers', - }, -{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - }, - ] - request, metadata = self._interceptor.pre_list_build_triggers(request, metadata) - pb_request = cloudbuild.ListBuildTriggersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.ListBuildTriggersResponse() - pb_resp = cloudbuild.ListBuildTriggersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_build_triggers(resp) - return resp - - class _ListWorkerPools(CloudBuildRestStub): - def __hash__(self): - return hash("ListWorkerPools") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.ListWorkerPoolsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.ListWorkerPoolsResponse: - r"""Call the list worker pools method over HTTP. - - Args: - request (~.cloudbuild.ListWorkerPoolsRequest): - The request object. Request to list ``WorkerPool``\ s. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.ListWorkerPoolsResponse: - Response containing existing ``WorkerPools``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/workerPools', - }, - ] - request, metadata = self._interceptor.pre_list_worker_pools(request, metadata) - pb_request = cloudbuild.ListWorkerPoolsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.ListWorkerPoolsResponse() - pb_resp = cloudbuild.ListWorkerPoolsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_worker_pools(resp) - return resp - - class _ReceiveTriggerWebhook(CloudBuildRestStub): - def __hash__(self): - return hash("ReceiveTriggerWebhook") - - def __call__(self, - request: cloudbuild.ReceiveTriggerWebhookRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.ReceiveTriggerWebhookResponse: - r"""Call the receive trigger webhook method over HTTP. 
- - Args: - request (~.cloudbuild.ReceiveTriggerWebhookRequest): - The request object. ReceiveTriggerWebhookRequest [Experimental] is the - request object accepted by the ReceiveTriggerWebhook - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.ReceiveTriggerWebhookResponse: - ReceiveTriggerWebhookResponse [Experimental] is the - response object for the ReceiveTriggerWebhook method. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/triggers/{trigger}:webhook', - 'body': 'body', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}:webhook', - 'body': 'body', - }, - ] - request, metadata = self._interceptor.pre_receive_trigger_webhook(request, metadata) - pb_request = cloudbuild.ReceiveTriggerWebhookRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.ReceiveTriggerWebhookResponse() - pb_resp = cloudbuild.ReceiveTriggerWebhookResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_receive_trigger_webhook(resp) - return resp - - class _RetryBuild(CloudBuildRestStub): - def __hash__(self): - return hash("RetryBuild") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.RetryBuildRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the retry build method over HTTP. - - Args: - request (~.cloudbuild.RetryBuildRequest): - The request object. Specifies a build to retry. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/builds/{id}:retry', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/builds/*}:retry', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_retry_build(request, metadata) - pb_request = cloudbuild.RetryBuildRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_retry_build(resp) - return resp - - class _RunBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("RunBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.RunBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the run build trigger method over HTTP. - - Args: - request (~.cloudbuild.RunBuildTriggerRequest): - The request object. Specifies a build trigger to run and - the source to use. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}:run', - 'body': 'source', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}:run', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_run_build_trigger(request, metadata) - pb_request = cloudbuild.RunBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_run_build_trigger(resp) - return resp - - class _UpdateBuildTrigger(CloudBuildRestStub): - def __hash__(self): - return hash("UpdateBuildTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.UpdateBuildTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> cloudbuild.BuildTrigger: - r"""Call the update build trigger method over HTTP. - - Args: - request (~.cloudbuild.UpdateBuildTriggerRequest): - The request object. Request to update an existing ``BuildTrigger``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloudbuild.BuildTrigger: - Configuration for an automated build - in response to source repository - changes. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/projects/{project_id}/triggers/{trigger_id}', - 'body': 'trigger', - }, -{ - 'method': 'patch', - 'uri': '/v1/{trigger.resource_name=projects/*/locations/*/triggers/*}', - 'body': 'trigger', - }, - ] - request, metadata = self._interceptor.pre_update_build_trigger(request, metadata) - pb_request = cloudbuild.UpdateBuildTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloudbuild.BuildTrigger() - pb_resp = cloudbuild.BuildTrigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_build_trigger(resp) - return resp - - class _UpdateWorkerPool(CloudBuildRestStub): - def __hash__(self): - return hash("UpdateWorkerPool") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: cloudbuild.UpdateWorkerPoolRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the update worker pool method over HTTP. - - Args: - request (~.cloudbuild.UpdateWorkerPoolRequest): - The request object. Request to update a ``WorkerPool``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}', - 'body': 'worker_pool', - }, - ] - request, metadata = self._interceptor.pre_update_worker_pool(request, metadata) - pb_request = cloudbuild.UpdateWorkerPoolRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_worker_pool(resp) - return resp - - @property - def approve_build(self) -> Callable[ - [cloudbuild.ApproveBuildRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ApproveBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - cloudbuild.Build]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - cloudbuild.BuildTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateWorkerPool(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteWorkerPool(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - cloudbuild.Build]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_build_trigger(self) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], - cloudbuild.BuildTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - cloudbuild.WorkerPool]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetWorkerPool(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - cloudbuild.ListBuildsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListBuilds(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - cloudbuild.ListBuildTriggersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBuildTriggers(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - cloudbuild.ListWorkerPoolsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListWorkerPools(self._session, self._host, self._interceptor) # type: ignore - - @property - def receive_trigger_webhook(self) -> Callable[ - [cloudbuild.ReceiveTriggerWebhookRequest], - cloudbuild.ReceiveTriggerWebhookResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ReceiveTriggerWebhook(self._session, self._host, self._interceptor) # type: ignore - - @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RetryBuild(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._RunBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - cloudbuild.BuildTrigger]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateWorkerPool(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'CloudBuildRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py deleted file mode 100644 index fab30741..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/__init__.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .cloudbuild import ( - ApprovalConfig, - ApprovalResult, - ApproveBuildRequest, - ArtifactResult, - Artifacts, - Build, - BuildApproval, - BuildOperationMetadata, - BuildOptions, - BuildStep, - BuildTrigger, - BuiltImage, - CancelBuildRequest, - CreateBuildRequest, - CreateBuildTriggerRequest, - CreateWorkerPoolOperationMetadata, - CreateWorkerPoolRequest, - DeleteBuildTriggerRequest, - DeleteWorkerPoolOperationMetadata, - DeleteWorkerPoolRequest, - FileHashes, - GetBuildRequest, - GetBuildTriggerRequest, - GetWorkerPoolRequest, - GitHubEventsConfig, - GitSource, - Hash, - InlineSecret, - ListBuildsRequest, - ListBuildsResponse, - ListBuildTriggersRequest, - ListBuildTriggersResponse, - ListWorkerPoolsRequest, - ListWorkerPoolsResponse, - PrivatePoolV1Config, - PubsubConfig, - PullRequestFilter, - PushFilter, - ReceiveTriggerWebhookRequest, - ReceiveTriggerWebhookResponse, - RepositoryEventConfig, - RepoSource, - Results, - RetryBuildRequest, - RunBuildTriggerRequest, - Secret, - SecretManagerSecret, - Secrets, - Source, - SourceProvenance, - StorageSource, - StorageSourceManifest, - TimeSpan, - UpdateBuildTriggerRequest, - UpdateWorkerPoolOperationMetadata, - UpdateWorkerPoolRequest, - UploadedMavenArtifact, - UploadedNpmPackage, - UploadedPythonPackage, - Volume, - WebhookConfig, - WorkerPool, -) - -__all__ = ( - 'ApprovalConfig', - 'ApprovalResult', - 'ApproveBuildRequest', - 'ArtifactResult', - 'Artifacts', - 'Build', - 'BuildApproval', - 'BuildOperationMetadata', - 'BuildOptions', - 'BuildStep', - 'BuildTrigger', - 'BuiltImage', - 'CancelBuildRequest', - 'CreateBuildRequest', - 'CreateBuildTriggerRequest', - 'CreateWorkerPoolOperationMetadata', - 'CreateWorkerPoolRequest', - 'DeleteBuildTriggerRequest', - 'DeleteWorkerPoolOperationMetadata', - 'DeleteWorkerPoolRequest', - 'FileHashes', - 'GetBuildRequest', - 'GetBuildTriggerRequest', - 'GetWorkerPoolRequest', - 'GitHubEventsConfig', - 'GitSource', - 'Hash', - 'InlineSecret', - 'ListBuildsRequest', - 
'ListBuildsResponse', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', - 'PrivatePoolV1Config', - 'PubsubConfig', - 'PullRequestFilter', - 'PushFilter', - 'ReceiveTriggerWebhookRequest', - 'ReceiveTriggerWebhookResponse', - 'RepositoryEventConfig', - 'RepoSource', - 'Results', - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'Secret', - 'SecretManagerSecret', - 'Secrets', - 'Source', - 'SourceProvenance', - 'StorageSource', - 'StorageSourceManifest', - 'TimeSpan', - 'UpdateBuildTriggerRequest', - 'UpdateWorkerPoolOperationMetadata', - 'UpdateWorkerPoolRequest', - 'UploadedMavenArtifact', - 'UploadedNpmPackage', - 'UploadedPythonPackage', - 'Volume', - 'WebhookConfig', - 'WorkerPool', -) diff --git a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py deleted file mode 100644 index 838474d2..00000000 --- a/owl-bot-staging/v1/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ /dev/null @@ -1,3680 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.api import httpbody_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.devtools.cloudbuild.v1', - manifest={ - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'StorageSource', - 'GitSource', - 'RepoSource', - 'StorageSourceManifest', - 'Source', - 'BuiltImage', - 'UploadedPythonPackage', - 'UploadedMavenArtifact', - 'UploadedNpmPackage', - 'BuildStep', - 'Volume', - 'Results', - 'ArtifactResult', - 'Build', - 'Artifacts', - 'TimeSpan', - 'BuildOperationMetadata', - 'SourceProvenance', - 'FileHashes', - 'Hash', - 'Secrets', - 'InlineSecret', - 'SecretManagerSecret', - 'Secret', - 'CreateBuildRequest', - 'GetBuildRequest', - 'ListBuildsRequest', - 'ListBuildsResponse', - 'CancelBuildRequest', - 'ApproveBuildRequest', - 'BuildApproval', - 'ApprovalConfig', - 'ApprovalResult', - 'BuildTrigger', - 'RepositoryEventConfig', - 'GitHubEventsConfig', - 'PubsubConfig', - 'WebhookConfig', - 'PullRequestFilter', - 'PushFilter', - 'CreateBuildTriggerRequest', - 'GetBuildTriggerRequest', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'DeleteBuildTriggerRequest', - 'UpdateBuildTriggerRequest', - 'BuildOptions', - 'ReceiveTriggerWebhookRequest', - 'ReceiveTriggerWebhookResponse', - 'WorkerPool', - 'PrivatePoolV1Config', - 'CreateWorkerPoolRequest', - 'GetWorkerPoolRequest', - 'DeleteWorkerPoolRequest', - 'UpdateWorkerPoolRequest', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', - 'CreateWorkerPoolOperationMetadata', - 'UpdateWorkerPoolOperationMetadata', - 'DeleteWorkerPoolOperationMetadata', - }, -) - - -class RetryBuildRequest(proto.Message): - r"""Specifies a build to retry. 
- - Attributes: - name (str): - The name of the ``Build`` to retry. Format: - ``projects/{project}/locations/{location}/builds/{build}`` - project_id (str): - Required. ID of the project. - id (str): - Required. Build ID of the original build. - """ - - name: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RunBuildTriggerRequest(proto.Message): - r"""Specifies a build trigger to run and the source to use. - - Attributes: - name (str): - The name of the ``Trigger`` to run. Format: - ``projects/{project}/locations/{location}/triggers/{trigger}`` - project_id (str): - Required. ID of the project. - trigger_id (str): - Required. ID of the trigger. - source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - Source to build against this trigger. - Branch and tag names cannot consist of regular - expressions. - """ - - name: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger_id: str = proto.Field( - proto.STRING, - number=2, - ) - source: 'RepoSource' = proto.Field( - proto.MESSAGE, - number=3, - message='RepoSource', - ) - - -class StorageSource(proto.Message): - r"""Location of the source in an archive file in Cloud Storage. - - Attributes: - bucket (str): - Cloud Storage bucket containing the source (see `Bucket Name - Requirements `__). - object_ (str): - Cloud Storage object containing the source. - - This object must be a zipped (``.zip``) or gzipped archive - file (``.tar.gz``) containing source to build. - generation (int): - Cloud Storage generation for the object. If - the generation is omitted, the latest generation - will be used. 
- """ - - bucket: str = proto.Field( - proto.STRING, - number=1, - ) - object_: str = proto.Field( - proto.STRING, - number=2, - ) - generation: int = proto.Field( - proto.INT64, - number=3, - ) - - -class GitSource(proto.Message): - r"""Location of the source in any accessible Git repository. - - Attributes: - url (str): - Location of the Git repo to build. - - This will be used as a ``git remote``, see - https://git-scm.com/docs/git-remote. - dir_ (str): - Directory, relative to the source root, in which to run the - build. - - This must be a relative path. If a step's ``dir`` is - specified and is an absolute path, this value is ignored for - that step's execution. - revision (str): - The revision to fetch from the Git repository such as a - branch, a tag, a commit SHA, or any Git ref. - - Cloud Build uses ``git fetch`` to fetch the revision from - the Git repository; therefore make sure that the string you - provide for ``revision`` is parsable by the command. For - information on string values accepted by ``git fetch``, see - https://git-scm.com/docs/gitrevisions#_specifying_revisions. - For information on ``git fetch``, see - https://git-scm.com/docs/git-fetch. - """ - - url: str = proto.Field( - proto.STRING, - number=1, - ) - dir_: str = proto.Field( - proto.STRING, - number=5, - ) - revision: str = proto.Field( - proto.STRING, - number=6, - ) - - -class RepoSource(proto.Message): - r"""Location of the source in a Google Cloud Source Repository. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project_id (str): - ID of the project that owns the Cloud Source - Repository. If omitted, the project ID - requesting the build is assumed. 
- repo_name (str): - Name of the Cloud Source Repository. - branch_name (str): - Regex matching branches to build. - The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``revision``. - tag_name (str): - Regex matching tags to build. - The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``revision``. - commit_sha (str): - Explicit commit SHA to build. - - This field is a member of `oneof`_ ``revision``. - dir_ (str): - Directory, relative to the source root, in which to run the - build. - - This must be a relative path. If a step's ``dir`` is - specified and is an absolute path, this value is ignored for - that step's execution. - invert_regex (bool): - Only trigger a build if the revision regex - does NOT match the revision regex. - substitutions (MutableMapping[str, str]): - Substitutions to use in a triggered build. - Should only be used with RunBuildTrigger - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - repo_name: str = proto.Field( - proto.STRING, - number=2, - ) - branch_name: str = proto.Field( - proto.STRING, - number=3, - oneof='revision', - ) - tag_name: str = proto.Field( - proto.STRING, - number=4, - oneof='revision', - ) - commit_sha: str = proto.Field( - proto.STRING, - number=5, - oneof='revision', - ) - dir_: str = proto.Field( - proto.STRING, - number=7, - ) - invert_regex: bool = proto.Field( - proto.BOOL, - number=8, - ) - substitutions: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=9, - ) - - -class StorageSourceManifest(proto.Message): - r"""Location of the source manifest in Cloud Storage. This feature is in - Preview; see description - `here `__. 
- - Attributes: - bucket (str): - Cloud Storage bucket containing the source manifest (see - `Bucket Name - Requirements `__). - object_ (str): - Cloud Storage object containing the source - manifest. - This object must be a JSON file. - generation (int): - Cloud Storage generation for the object. If - the generation is omitted, the latest generation - will be used. - """ - - bucket: str = proto.Field( - proto.STRING, - number=1, - ) - object_: str = proto.Field( - proto.STRING, - number=2, - ) - generation: int = proto.Field( - proto.INT64, - number=3, - ) - - -class Source(proto.Message): - r"""Location of the source in a supported storage service. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource): - If provided, get the source from this - location in Cloud Storage. - - This field is a member of `oneof`_ ``source``. - repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - If provided, get the source from this - location in a Cloud Source Repository. - - This field is a member of `oneof`_ ``source``. - git_source (google.cloud.devtools.cloudbuild_v1.types.GitSource): - If provided, get the source from this Git - repository. - - This field is a member of `oneof`_ ``source``. - storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest): - If provided, get the source from this manifest in Cloud - Storage. This feature is in Preview; see description - `here `__. - - This field is a member of `oneof`_ ``source``. 
- """ - - storage_source: 'StorageSource' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='StorageSource', - ) - repo_source: 'RepoSource' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='RepoSource', - ) - git_source: 'GitSource' = proto.Field( - proto.MESSAGE, - number=5, - oneof='source', - message='GitSource', - ) - storage_source_manifest: 'StorageSourceManifest' = proto.Field( - proto.MESSAGE, - number=8, - oneof='source', - message='StorageSourceManifest', - ) - - -class BuiltImage(proto.Message): - r"""An image built by the pipeline. - - Attributes: - name (str): - Name used to push the container image to Google Container - Registry, as presented to ``docker push``. - digest (str): - Docker Registry 2.0 digest. - push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing the specified image. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - digest: str = proto.Field( - proto.STRING, - number=3, - ) - push_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=4, - message='TimeSpan', - ) - - -class UploadedPythonPackage(proto.Message): - r"""Artifact uploaded using the PythonPackage directive. - - Attributes: - uri (str): - URI of the uploaded artifact. - file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): - Hash types and values of the Python Artifact. - push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing the specified artifact. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - file_hashes: 'FileHashes' = proto.Field( - proto.MESSAGE, - number=2, - message='FileHashes', - ) - push_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=3, - message='TimeSpan', - ) - - -class UploadedMavenArtifact(proto.Message): - r"""A Maven artifact uploaded using the MavenArtifact directive. 
- - Attributes: - uri (str): - URI of the uploaded artifact. - file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): - Hash types and values of the Maven Artifact. - push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing the specified artifact. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - file_hashes: 'FileHashes' = proto.Field( - proto.MESSAGE, - number=2, - message='FileHashes', - ) - push_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=3, - message='TimeSpan', - ) - - -class UploadedNpmPackage(proto.Message): - r"""An npm package uploaded to Artifact Registry using the - NpmPackage directive. - - Attributes: - uri (str): - URI of the uploaded npm package. - file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): - Hash types and values of the npm package. - push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing the specified artifact. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - file_hashes: 'FileHashes' = proto.Field( - proto.MESSAGE, - number=2, - message='FileHashes', - ) - push_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=3, - message='TimeSpan', - ) - - -class BuildStep(proto.Message): - r"""A step in the build pipeline. - - Attributes: - name (str): - Required. The name of the container image that will run this - particular build step. - - If the image is available in the host's Docker daemon's - cache, it will be run directly. If not, the host will - attempt to pull the image first, using the builder service - account's credentials if necessary. - - The Docker daemon's cache will already have the latest - versions of all of the officially supported build steps - (https://github.com/GoogleCloudPlatform/cloud-builders). 
The - Docker daemon will also have cached many of the layers for - some popular images, like "ubuntu", "debian", but they will - be refreshed at the time you attempt to use them. - - If you built an image in a previous build step, it will be - stored in the host's Docker daemon's cache and is available - to use as the name for a later build step. - env (MutableSequence[str]): - A list of environment variable definitions to - be used when running a step. - The elements are of the form "KEY=VALUE" for the - environment variable "KEY" being given the value - "VALUE". - args (MutableSequence[str]): - A list of arguments that will be presented to the step when - it is started. - - If the image used to run the step's container has an - entrypoint, the ``args`` are used as arguments to that - entrypoint. If the image does not define an entrypoint, the - first element in args is used as the entrypoint, and the - remainder will be used as arguments. - dir_ (str): - Working directory to use when running this step's container. - - If this value is a relative path, it is relative to the - build's working directory. If this value is absolute, it may - be outside the build's working directory, in which case the - contents of the path may not be persisted across build step - executions, unless a ``volume`` for that path is specified. - - If the build specifies a ``RepoSource`` with ``dir`` and a - step with a ``dir``, which specifies an absolute path, the - ``RepoSource`` ``dir`` is ignored for the step's execution. - id (str): - Unique identifier for this build step, used in ``wait_for`` - to reference this build step as a dependency. - wait_for (MutableSequence[str]): - The ID(s) of the step(s) that this build step depends on. - This build step will not start until all the build steps in - ``wait_for`` have completed successfully. If ``wait_for`` is - empty, this build step will start when all previous build - steps in the ``Build.Steps`` list have completed - successfully. 
- entrypoint (str): - Entrypoint to be used instead of the build - step image's default entrypoint. If unset, the - image's default entrypoint is used. - secret_env (MutableSequence[str]): - A list of environment variables which are encrypted using a - Cloud Key Management Service crypto key. These values must - be specified in the build's ``Secret``. - volumes (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Volume]): - List of volumes to mount into the build step. - Each volume is created as an empty volume prior - to execution of the build step. Upon completion - of the build, volumes and their contents are - discarded. - - Using a named volume in only one step is not - valid as it is indicative of a build request - with an incorrect configuration. - timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - executing this build step. - pull_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pulling this build step's builder image only. - timeout (google.protobuf.duration_pb2.Duration): - Time limit for executing this build step. If - not defined, the step has no time limit and will - be allowed to continue to run until either it - completes or the build itself times out. - status (google.cloud.devtools.cloudbuild_v1.types.Build.Status): - Output only. Status of the build step. At - this time, build step status is only updated on - build completion; step status is not updated in - real-time as the build progresses. - allow_failure (bool): - Allow this build step to fail without failing the entire - build. - - If false, the entire build will fail if this step fails. - Otherwise, the build will succeed, but this step will still - have a failure status. Error information will be reported in - the failure_detail field. - exit_code (int): - Output only. Return code from running the - step. 
- allow_exit_codes (MutableSequence[int]): - Allow this build step to fail without failing the entire - build if and only if the exit code is one of the specified - codes. If allow_failure is also specified, this field will - take precedence. - script (str): - A shell script to be executed in the step. - When script is provided, the user cannot specify - the entrypoint or args. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - env: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - dir_: str = proto.Field( - proto.STRING, - number=4, - ) - id: str = proto.Field( - proto.STRING, - number=5, - ) - wait_for: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - entrypoint: str = proto.Field( - proto.STRING, - number=7, - ) - secret_env: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - volumes: MutableSequence['Volume'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='Volume', - ) - timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=10, - message='TimeSpan', - ) - pull_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=13, - message='TimeSpan', - ) - timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=11, - message=duration_pb2.Duration, - ) - status: 'Build.Status' = proto.Field( - proto.ENUM, - number=12, - enum='Build.Status', - ) - allow_failure: bool = proto.Field( - proto.BOOL, - number=14, - ) - exit_code: int = proto.Field( - proto.INT32, - number=16, - ) - allow_exit_codes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=18, - ) - script: str = proto.Field( - proto.STRING, - number=19, - ) - - -class Volume(proto.Message): - r"""Volume describes a Docker container volume which is mounted - into build steps in order to persist files across build step - execution. 
- - Attributes: - name (str): - Name of the volume to mount. - Volume names must be unique per build step and - must be valid names for Docker volumes. Each - named volume must be used by at least two build - steps. - path (str): - Path at which to mount the volume. - Paths must be absolute and cannot conflict with - other volume paths on the same build step or - with certain reserved volume paths. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - path: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Results(proto.Message): - r"""Artifacts created by the build pipeline. - - Attributes: - images (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuiltImage]): - Container images that were built as a part of - the build. - build_step_images (MutableSequence[str]): - List of build step digests, in the order - corresponding to build step indices. - artifact_manifest (str): - Path to the artifact manifest for - non-container artifacts uploaded to Cloud - Storage. Only populated when artifacts are - uploaded to Cloud Storage. - num_artifacts (int): - Number of non-container artifacts uploaded to - Cloud Storage. Only populated when artifacts are - uploaded to Cloud Storage. - build_step_outputs (MutableSequence[bytes]): - List of build step outputs, produced by builder images, in - the order corresponding to build step indices. - - `Cloud - Builders `__ - can produce this output by writing to - ``$BUILDER_OUTPUT/output``. Only the first 4KB of data is - stored. - artifact_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Time to push all non-container artifacts to - Cloud Storage. - python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedPythonPackage]): - Python artifacts uploaded to Artifact - Registry at the end of the build. 
- maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedMavenArtifact]): - Maven artifacts uploaded to Artifact Registry - at the end of the build. - npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedNpmPackage]): - Npm packages uploaded to Artifact Registry at - the end of the build. - """ - - images: MutableSequence['BuiltImage'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='BuiltImage', - ) - build_step_images: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - artifact_manifest: str = proto.Field( - proto.STRING, - number=4, - ) - num_artifacts: int = proto.Field( - proto.INT64, - number=5, - ) - build_step_outputs: MutableSequence[bytes] = proto.RepeatedField( - proto.BYTES, - number=6, - ) - artifact_timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=7, - message='TimeSpan', - ) - python_packages: MutableSequence['UploadedPythonPackage'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='UploadedPythonPackage', - ) - maven_artifacts: MutableSequence['UploadedMavenArtifact'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='UploadedMavenArtifact', - ) - npm_packages: MutableSequence['UploadedNpmPackage'] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message='UploadedNpmPackage', - ) - - -class ArtifactResult(proto.Message): - r"""An artifact that was uploaded during a build. This - is a single record in the artifact manifest JSON file. - - Attributes: - location (str): - The path of an artifact in a Cloud Storage bucket, with the - generation number. For example, - ``gs://mybucket/path/to/output.jar#generation``. - file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.FileHashes]): - The file hash of the artifact. 
- """ - - location: str = proto.Field( - proto.STRING, - number=1, - ) - file_hash: MutableSequence['FileHashes'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='FileHashes', - ) - - -class Build(proto.Message): - r"""A build resource in the Cloud Build API. - - At a high level, a ``Build`` describes where to find source code, - how to build it (for example, the builder image to run on the - source), and where to store the built artifacts. - - Fields can include the following variables, which will be expanded - when the build is created: - - - $PROJECT_ID: the project ID of the build. - - $PROJECT_NUMBER: the project number of the build. - - $LOCATION: the location/region of the build. - - $BUILD_ID: the autogenerated ID of the build. - - $REPO_NAME: the source repository name specified by RepoSource. - - $BRANCH_NAME: the branch name specified by RepoSource. - - $TAG_NAME: the tag name specified by RepoSource. - - $REVISION_ID or $COMMIT_SHA: the commit SHA specified by - RepoSource or resolved from the specified branch or tag. - - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. - - Attributes: - name (str): - Output only. The 'Build' name with format: - ``projects/{project}/locations/{location}/builds/{build}``, - where {build} is a unique identifier generated by the - service. - id (str): - Output only. Unique identifier of the build. - project_id (str): - Output only. ID of the project. - status (google.cloud.devtools.cloudbuild_v1.types.Build.Status): - Output only. Status of the build. - status_detail (str): - Output only. Customer-readable message about - the current status. - source (google.cloud.devtools.cloudbuild_v1.types.Source): - The location of the source files to build. - steps (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildStep]): - Required. The operations to be performed on - the workspace. - results (google.cloud.devtools.cloudbuild_v1.types.Results): - Output only. Results of the build. 
- create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which the request to - create the build was received. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which execution of the - build was started. - finish_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which execution of the build was - finished. - - The difference between finish_time and start_time is the - duration of the build's execution. - timeout (google.protobuf.duration_pb2.Duration): - Amount of time that this build should be allowed to run, to - second granularity. If this amount of time elapses, work on - the build will cease and the build status will be - ``TIMEOUT``. - - ``timeout`` starts ticking from ``startTime``. - - Default time is 60 minutes. - images (MutableSequence[str]): - A list of images to be pushed upon the successful completion - of all build steps. - - The images are pushed using the builder service account's - credentials. - - The digests of the pushed images will be stored in the - ``Build`` resource's results field. - - If any of the images fail to be pushed, the build status is - marked ``FAILURE``. - queue_ttl (google.protobuf.duration_pb2.Duration): - TTL in queue for this build. If provided and the build is - enqueued longer than this value, the build will expire and - the build status will be ``EXPIRED``. - - The TTL starts ticking from create_time. - artifacts (google.cloud.devtools.cloudbuild_v1.types.Artifacts): - Artifacts produced by the build that should - be uploaded upon successful completion of all - build steps. - logs_bucket (str): - Cloud Storage bucket where logs should be written (see - `Bucket Name - Requirements `__). - Logs file names will be of the format - ``${logs_bucket}/log-${build_id}.txt``. - source_provenance (google.cloud.devtools.cloudbuild_v1.types.SourceProvenance): - Output only. A permanent fixed identifier for - source. - build_trigger_id (str): - Output only. 
The ID of the ``BuildTrigger`` that triggered - this build, if it was triggered automatically. - options (google.cloud.devtools.cloudbuild_v1.types.BuildOptions): - Special options for this build. - log_url (str): - Output only. URL to logs for this build in - Google Cloud Console. - substitutions (MutableMapping[str, str]): - Substitutions data for ``Build`` resource. - tags (MutableSequence[str]): - Tags for annotation of a ``Build``. These are not docker - tags. - secrets (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Secret]): - Secrets to decrypt using Cloud Key Management Service. Note: - Secret Manager is the recommended technique for managing - sensitive data with Cloud Build. Use ``available_secrets`` - to configure builds to access secrets from Secret Manager. - For instructions, see: - https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets - timing (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.TimeSpan]): - Output only. Stores timing information for phases of the - build. Valid keys are: - - - BUILD: time to execute all build steps. - - PUSH: time to push all artifacts including docker images - and non docker artifacts. - - FETCHSOURCE: time to fetch source. - - SETUPBUILD: time to set up build. - - If the build does not specify source or images, these keys - will not be included. - approval (google.cloud.devtools.cloudbuild_v1.types.BuildApproval): - Output only. Describes this build's approval - configuration, status, and result. - service_account (str): - IAM service account whose credentials will be used at build - runtime. Must be of the format - ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}``. ACCOUNT - can be email address or uniqueId of the service account. - available_secrets (google.cloud.devtools.cloudbuild_v1.types.Secrets): - Secrets and secret environment variables. - warnings (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Build.Warning]): - Output only. 
Non-fatal problems encountered - during the execution of the build. - failure_info (google.cloud.devtools.cloudbuild_v1.types.Build.FailureInfo): - Output only. Contains information about the - build when status=FAILURE. - """ - class Status(proto.Enum): - r"""Possible status of a build or build step. - - Values: - STATUS_UNKNOWN (0): - Status of the build is unknown. - PENDING (10): - Build has been created and is pending - execution and queuing. It has not been queued. - QUEUED (1): - Build or step is queued; work has not yet - begun. - WORKING (2): - Build or step is being executed. - SUCCESS (3): - Build or step finished successfully. - FAILURE (4): - Build or step failed to complete - successfully. - INTERNAL_ERROR (5): - Build or step failed due to an internal - cause. - TIMEOUT (6): - Build or step took longer than was allowed. - CANCELLED (7): - Build or step was canceled by a user. - EXPIRED (9): - Build was enqueued for longer than the value of - ``queue_ttl``. - """ - STATUS_UNKNOWN = 0 - PENDING = 10 - QUEUED = 1 - WORKING = 2 - SUCCESS = 3 - FAILURE = 4 - INTERNAL_ERROR = 5 - TIMEOUT = 6 - CANCELLED = 7 - EXPIRED = 9 - - class Warning(proto.Message): - r"""A non-fatal problem encountered during the execution of the - build. - - Attributes: - text (str): - Explanation of the warning generated. - priority (google.cloud.devtools.cloudbuild_v1.types.Build.Warning.Priority): - The priority for this warning. - """ - class Priority(proto.Enum): - r"""The relative importance of this warning. - - Values: - PRIORITY_UNSPECIFIED (0): - Should not be used. - INFO (1): - e.g. deprecation warnings and alternative - feature highlights. - WARNING (2): - e.g. automated detection of possible issues - with the build. - ALERT (3): - e.g. 
alerts that a feature used in the build - is pending removal - """ - PRIORITY_UNSPECIFIED = 0 - INFO = 1 - WARNING = 2 - ALERT = 3 - - text: str = proto.Field( - proto.STRING, - number=1, - ) - priority: 'Build.Warning.Priority' = proto.Field( - proto.ENUM, - number=2, - enum='Build.Warning.Priority', - ) - - class FailureInfo(proto.Message): - r"""A fatal problem encountered during the execution of the - build. - - Attributes: - type_ (google.cloud.devtools.cloudbuild_v1.types.Build.FailureInfo.FailureType): - The name of the failure. - detail (str): - Explains the failure issue in more detail - using hard-coded text. - """ - class FailureType(proto.Enum): - r"""The name of a fatal problem encountered during the execution - of the build. - - Values: - FAILURE_TYPE_UNSPECIFIED (0): - Type unspecified - PUSH_FAILED (1): - Unable to push the image to the repository. - PUSH_IMAGE_NOT_FOUND (2): - Final image not found. - PUSH_NOT_AUTHORIZED (3): - Unauthorized push of the final image. - LOGGING_FAILURE (4): - Backend logging failures. Should retry. - USER_BUILD_STEP (5): - A build step has failed. - FETCH_SOURCE_FAILED (6): - The source fetching has failed. 
- """ - FAILURE_TYPE_UNSPECIFIED = 0 - PUSH_FAILED = 1 - PUSH_IMAGE_NOT_FOUND = 2 - PUSH_NOT_AUTHORIZED = 3 - LOGGING_FAILURE = 4 - USER_BUILD_STEP = 5 - FETCH_SOURCE_FAILED = 6 - - type_: 'Build.FailureInfo.FailureType' = proto.Field( - proto.ENUM, - number=1, - enum='Build.FailureInfo.FailureType', - ) - detail: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=45, - ) - id: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=16, - ) - status: Status = proto.Field( - proto.ENUM, - number=2, - enum=Status, - ) - status_detail: str = proto.Field( - proto.STRING, - number=24, - ) - source: 'Source' = proto.Field( - proto.MESSAGE, - number=3, - message='Source', - ) - steps: MutableSequence['BuildStep'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='BuildStep', - ) - results: 'Results' = proto.Field( - proto.MESSAGE, - number=10, - message='Results', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - finish_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=12, - message=duration_pb2.Duration, - ) - images: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=13, - ) - queue_ttl: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=40, - message=duration_pb2.Duration, - ) - artifacts: 'Artifacts' = proto.Field( - proto.MESSAGE, - number=37, - message='Artifacts', - ) - logs_bucket: str = proto.Field( - proto.STRING, - number=19, - ) - source_provenance: 'SourceProvenance' = proto.Field( - proto.MESSAGE, - number=21, - message='SourceProvenance', - ) - build_trigger_id: str = proto.Field( 
- proto.STRING, - number=22, - ) - options: 'BuildOptions' = proto.Field( - proto.MESSAGE, - number=23, - message='BuildOptions', - ) - log_url: str = proto.Field( - proto.STRING, - number=25, - ) - substitutions: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=29, - ) - tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=31, - ) - secrets: MutableSequence['Secret'] = proto.RepeatedField( - proto.MESSAGE, - number=32, - message='Secret', - ) - timing: MutableMapping[str, 'TimeSpan'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=33, - message='TimeSpan', - ) - approval: 'BuildApproval' = proto.Field( - proto.MESSAGE, - number=44, - message='BuildApproval', - ) - service_account: str = proto.Field( - proto.STRING, - number=42, - ) - available_secrets: 'Secrets' = proto.Field( - proto.MESSAGE, - number=47, - message='Secrets', - ) - warnings: MutableSequence[Warning] = proto.RepeatedField( - proto.MESSAGE, - number=49, - message=Warning, - ) - failure_info: FailureInfo = proto.Field( - proto.MESSAGE, - number=51, - message=FailureInfo, - ) - - -class Artifacts(proto.Message): - r"""Artifacts produced by a build that should be uploaded upon - successful completion of all build steps. - - Attributes: - images (MutableSequence[str]): - A list of images to be pushed upon the - successful completion of all build steps. - - The images will be pushed using the builder - service account's credentials. - The digests of the pushed images will be stored - in the Build resource's results field. - - If any of the images fail to be pushed, the - build is marked FAILURE. - objects (google.cloud.devtools.cloudbuild_v1.types.Artifacts.ArtifactObjects): - A list of objects to be uploaded to Cloud - Storage upon successful completion of all build - steps. 
- Files in the workspace matching specified paths - globs will be uploaded to the specified Cloud - Storage location using the builder service - account's credentials. - - The location and generation of the uploaded - objects will be stored in the Build resource's - results field. - - If any objects fail to be pushed, the build is - marked FAILURE. - maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.MavenArtifact]): - A list of Maven artifacts to be uploaded to - Artifact Registry upon successful completion of - all build steps. - Artifacts in the workspace matching specified - paths globs will be uploaded to the specified - Artifact Registry repository using the builder - service account's credentials. - - If any artifacts fail to be pushed, the build is - marked FAILURE. - python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.PythonPackage]): - A list of Python packages to be uploaded to - Artifact Registry upon successful completion of - all build steps. - The build service account credentials will be - used to perform the upload. - If any objects fail to be pushed, the build is - marked FAILURE. - npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.NpmPackage]): - A list of npm packages to be uploaded to - Artifact Registry upon successful completion of - all build steps. - Npm packages in the specified paths will be - uploaded to the specified Artifact Registry - repository using the builder service account's - credentials. - - If any packages fail to be pushed, the build is - marked FAILURE. - """ - - class ArtifactObjects(proto.Message): - r"""Files in the workspace to upload to Cloud Storage upon - successful completion of all build steps. - - Attributes: - location (str): - Cloud Storage bucket and optional object path, in the form - "gs://bucket/path/to/somewhere/". (see `Bucket Name - Requirements `__). 
- - Files in the workspace matching any path pattern will be - uploaded to Cloud Storage with this location as a prefix. - paths (MutableSequence[str]): - Path globs used to match files in the build's - workspace. - timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): - Output only. Stores timing information for - pushing all artifact objects. - """ - - location: str = proto.Field( - proto.STRING, - number=1, - ) - paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - timing: 'TimeSpan' = proto.Field( - proto.MESSAGE, - number=3, - message='TimeSpan', - ) - - class MavenArtifact(proto.Message): - r"""A Maven artifact to upload to Artifact Registry upon - successful completion of all build steps. - - Attributes: - repository (str): - Artifact Registry repository, in the form - "https://$REGION-maven.pkg.dev/$PROJECT/$REPOSITORY" - Artifact in the workspace specified by path will - be uploaded to Artifact Registry with this - location as a prefix. - path (str): - Path to an artifact in the build's workspace - to be uploaded to Artifact Registry. - This can be either an absolute path, - e.g. - /workspace/my-app/target/my-app-1.0.SNAPSHOT.jar - or a relative path from /workspace, - e.g. my-app/target/my-app-1.0.SNAPSHOT.jar. - artifact_id (str): - Maven ``artifactId`` value used when uploading the artifact - to Artifact Registry. - group_id (str): - Maven ``groupId`` value used when uploading the artifact to - Artifact Registry. - version (str): - Maven ``version`` value used when uploading the artifact to - Artifact Registry. 
- """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - path: str = proto.Field( - proto.STRING, - number=2, - ) - artifact_id: str = proto.Field( - proto.STRING, - number=3, - ) - group_id: str = proto.Field( - proto.STRING, - number=4, - ) - version: str = proto.Field( - proto.STRING, - number=5, - ) - - class PythonPackage(proto.Message): - r"""Python package to upload to Artifact Registry upon successful - completion of all build steps. A package can encapsulate - multiple objects to be uploaded to a single repository. - - Attributes: - repository (str): - Artifact Registry repository, in the form - "https://$REGION-python.pkg.dev/$PROJECT/$REPOSITORY" - Files in the workspace matching any path pattern - will be uploaded to Artifact Registry with this - location as a prefix. - paths (MutableSequence[str]): - Path globs used to match files in the build's workspace. For - Python/ Twine, this is usually ``dist/*``, and sometimes - additionally an ``.asc`` file. - """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - paths: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - class NpmPackage(proto.Message): - r"""Npm package to upload to Artifact Registry upon successful - completion of all build steps. - - Attributes: - repository (str): - Artifact Registry repository, in the form - "https://$REGION-npm.pkg.dev/$PROJECT/$REPOSITORY" - Npm package in the workspace specified by path - will be zipped and uploaded to Artifact Registry - with this location as a prefix. - package_path (str): - Path to the package.json. - e.g. 
workspace/path/to/package - """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - package_path: str = proto.Field( - proto.STRING, - number=2, - ) - - images: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - objects: ArtifactObjects = proto.Field( - proto.MESSAGE, - number=2, - message=ArtifactObjects, - ) - maven_artifacts: MutableSequence[MavenArtifact] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=MavenArtifact, - ) - python_packages: MutableSequence[PythonPackage] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=PythonPackage, - ) - npm_packages: MutableSequence[NpmPackage] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=NpmPackage, - ) - - -class TimeSpan(proto.Message): - r"""Start and end times for a build execution phase. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Start of time span. - end_time (google.protobuf.timestamp_pb2.Timestamp): - End of time span. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class BuildOperationMetadata(proto.Message): - r"""Metadata for build operations. - - Attributes: - build (google.cloud.devtools.cloudbuild_v1.types.Build): - The build that the operation is tracking. - """ - - build: 'Build' = proto.Field( - proto.MESSAGE, - number=1, - message='Build', - ) - - -class SourceProvenance(proto.Message): - r"""Provenance of the source. Ways to find the original source, - or verify that some source was used for this build. - - Attributes: - resolved_storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource): - A copy of the build's ``source.storage_source``, if exists, - with any generations resolved. 
- resolved_repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - A copy of the build's ``source.repo_source``, if exists, - with any revisions resolved. - resolved_storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest): - A copy of the build's ``source.storage_source_manifest``, if - exists, with any revisions resolved. This feature is in - Preview. - file_hashes (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.FileHashes]): - Output only. Hash(es) of the build source, which can be used - to verify that the original source integrity was maintained - in the build. Note that ``FileHashes`` will only be - populated if ``BuildOptions`` has requested a - ``SourceProvenanceHash``. - - The keys to this map are file paths used as build source and - the values contain the hash values for those files. - - If the build source came in a single package such as a - gzipped tarfile (``.tar.gz``), the ``FileHash`` will be for - the single path to that file. - """ - - resolved_storage_source: 'StorageSource' = proto.Field( - proto.MESSAGE, - number=3, - message='StorageSource', - ) - resolved_repo_source: 'RepoSource' = proto.Field( - proto.MESSAGE, - number=6, - message='RepoSource', - ) - resolved_storage_source_manifest: 'StorageSourceManifest' = proto.Field( - proto.MESSAGE, - number=9, - message='StorageSourceManifest', - ) - file_hashes: MutableMapping[str, 'FileHashes'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=4, - message='FileHashes', - ) - - -class FileHashes(proto.Message): - r"""Container message for hashes of byte content of files, used - in SourceProvenance messages to verify integrity of source input - to the build. - - Attributes: - file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Hash]): - Collection of file hashes. 
- """ - - file_hash: MutableSequence['Hash'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Hash', - ) - - -class Hash(proto.Message): - r"""Container message for hash values. - - Attributes: - type_ (google.cloud.devtools.cloudbuild_v1.types.Hash.HashType): - The type of hash that was performed. - value (bytes): - The hash value. - """ - class HashType(proto.Enum): - r"""Specifies the hash algorithm, if any. - - Values: - NONE (0): - No hash requested. - SHA256 (1): - Use a sha256 hash. - MD5 (2): - Use a md5 hash. - SHA512 (4): - Use a sha512 hash. - """ - NONE = 0 - SHA256 = 1 - MD5 = 2 - SHA512 = 4 - - type_: HashType = proto.Field( - proto.ENUM, - number=1, - enum=HashType, - ) - value: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class Secrets(proto.Message): - r"""Secrets and secret environment variables. - - Attributes: - secret_manager (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.SecretManagerSecret]): - Secrets in Secret Manager and associated - secret environment variable. - inline (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.InlineSecret]): - Secrets encrypted with KMS key and the - associated secret environment variable. - """ - - secret_manager: MutableSequence['SecretManagerSecret'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SecretManagerSecret', - ) - inline: MutableSequence['InlineSecret'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='InlineSecret', - ) - - -class InlineSecret(proto.Message): - r"""Pairs a set of secret environment variables mapped to - encrypted values with the Cloud KMS key to use to decrypt the - value. - - Attributes: - kms_key_name (str): - Resource name of Cloud KMS crypto key to decrypt the - encrypted value. In format: - projects/\ */locations/*/keyRings/*/cryptoKeys/* - env_map (MutableMapping[str, bytes]): - Map of environment variable name to its - encrypted value. 
- Secret environment variables must be unique - across all of a build's secrets, and must be - used by at least one build step. Values can be - at most 64 KB in size. There can be at most 100 - secret values across all of a build's secrets. - """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=1, - ) - env_map: MutableMapping[str, bytes] = proto.MapField( - proto.STRING, - proto.BYTES, - number=2, - ) - - -class SecretManagerSecret(proto.Message): - r"""Pairs a secret environment variable with a SecretVersion in - Secret Manager. - - Attributes: - version_name (str): - Resource name of the SecretVersion. In format: - projects/\ */secrets/*/versions/\* - env (str): - Environment variable name to associate with - the secret. Secret environment variables must be - unique across all of a build's secrets, and must - be used by at least one build step. - """ - - version_name: str = proto.Field( - proto.STRING, - number=1, - ) - env: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Secret(proto.Message): - r"""Pairs a set of secret environment variables containing encrypted - values with the Cloud KMS key to use to decrypt the value. Note: Use - ``kmsKeyName`` with ``available_secrets`` instead of using - ``kmsKeyName`` with ``secret``. For instructions see: - https://cloud.google.com/cloud-build/docs/securing-builds/use-encrypted-credentials. - - Attributes: - kms_key_name (str): - Cloud KMS key name to use to decrypt these - envs. - secret_env (MutableMapping[str, bytes]): - Map of environment variable name to its - encrypted value. - Secret environment variables must be unique - across all of a build's secrets, and must be - used by at least one build step. Values can be - at most 64 KB in size. There can be at most 100 - secret values across all of a build's secrets. 
- """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=1, - ) - secret_env: MutableMapping[str, bytes] = proto.MapField( - proto.STRING, - proto.BYTES, - number=3, - ) - - -class CreateBuildRequest(proto.Message): - r"""Request to create a new build. - - Attributes: - parent (str): - The parent resource where this build will be created. - Format: ``projects/{project}/locations/{location}`` - project_id (str): - Required. ID of the project. - build (google.cloud.devtools.cloudbuild_v1.types.Build): - Required. Build resource to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - build: 'Build' = proto.Field( - proto.MESSAGE, - number=2, - message='Build', - ) - - -class GetBuildRequest(proto.Message): - r"""Request to get a build. - - Attributes: - name (str): - The name of the ``Build`` to retrieve. Format: - ``projects/{project}/locations/{location}/builds/{build}`` - project_id (str): - Required. ID of the project. - id (str): - Required. ID of the build. - """ - - name: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListBuildsRequest(proto.Message): - r"""Request to list builds. - - Attributes: - parent (str): - The parent of the collection of ``Builds``. Format: - ``projects/{project}/locations/{location}`` - project_id (str): - Required. ID of the project. - page_size (int): - Number of results to return in the list. - page_token (str): - The page token for the next page of Builds. - - If unspecified, the first page of results is returned. - - If the token is rejected for any reason, INVALID_ARGUMENT - will be thrown. In this case, the token should be discarded, - and pagination should be restarted from the first page of - results. - - See https://google.aip.dev/158 for more. 
- filter (str): - The raw filter text to constrain the results. - """ - - parent: str = proto.Field( - proto.STRING, - number=9, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=8, - ) - - -class ListBuildsResponse(proto.Message): - r"""Response including listed builds. - - Attributes: - builds (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Build]): - Builds will be sorted by ``create_time``, descending. - next_page_token (str): - Token to receive the next page of results. - This will be absent if the end of the response - list has been reached. - """ - - @property - def raw_page(self): - return self - - builds: MutableSequence['Build'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Build', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelBuildRequest(proto.Message): - r"""Request to cancel an ongoing build. - - Attributes: - name (str): - The name of the ``Build`` to cancel. Format: - ``projects/{project}/locations/{location}/builds/{build}`` - project_id (str): - Required. ID of the project. - id (str): - Required. ID of the build. - """ - - name: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ApproveBuildRequest(proto.Message): - r"""Request to approve or reject a pending build. - - Attributes: - name (str): - Required. Name of the target build. For example: - "projects/{$project_id}/builds/{$build_id}". - approval_result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): - Approval decision and metadata. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - approval_result: 'ApprovalResult' = proto.Field( - proto.MESSAGE, - number=2, - message='ApprovalResult', - ) - - -class BuildApproval(proto.Message): - r"""BuildApproval describes a build's approval configuration, - state, and result. - - Attributes: - state (google.cloud.devtools.cloudbuild_v1.types.BuildApproval.State): - Output only. The state of this build's - approval. - config (google.cloud.devtools.cloudbuild_v1.types.ApprovalConfig): - Output only. Configuration for manual - approval of this build. - result (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult): - Output only. Result of manual approval for - this Build. - """ - class State(proto.Enum): - r"""Specifies the current state of a build's approval. - - Values: - STATE_UNSPECIFIED (0): - Default enum type. This should not be used. - PENDING (1): - Build approval is pending. - APPROVED (2): - Build approval has been approved. - REJECTED (3): - Build approval has been rejected. - CANCELLED (5): - Build was cancelled while it was still - pending approval. - """ - STATE_UNSPECIFIED = 0 - PENDING = 1 - APPROVED = 2 - REJECTED = 3 - CANCELLED = 5 - - state: State = proto.Field( - proto.ENUM, - number=1, - enum=State, - ) - config: 'ApprovalConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='ApprovalConfig', - ) - result: 'ApprovalResult' = proto.Field( - proto.MESSAGE, - number=3, - message='ApprovalResult', - ) - - -class ApprovalConfig(proto.Message): - r"""ApprovalConfig describes configuration for manual approval of - a build. - - Attributes: - approval_required (bool): - Whether or not approval is needed. If this is - set on a build, it will become pending when - created, and will need to be explicitly approved - to start. 
- """ - - approval_required: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class ApprovalResult(proto.Message): - r"""ApprovalResult describes the decision and associated metadata - of a manual approval of a build. - - Attributes: - approver_account (str): - Output only. Email of the user that called - the ApproveBuild API to approve or reject a - build at the time that the API was called. - approval_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the approval - decision was made. - decision (google.cloud.devtools.cloudbuild_v1.types.ApprovalResult.Decision): - Required. The decision of this manual - approval. - comment (str): - Optional. An optional comment for this manual - approval result. - url (str): - Optional. An optional URL tied to this manual - approval result. This field is essentially the - same as comment, except that it will be rendered - by the UI differently. An example use case is a - link to an external job that approved this - Build. - """ - class Decision(proto.Enum): - r"""Specifies whether or not this manual approval result is to - approve or reject a build. - - Values: - DECISION_UNSPECIFIED (0): - Default enum type. This should not be used. - APPROVED (1): - Build is approved. - REJECTED (2): - Build is rejected. - """ - DECISION_UNSPECIFIED = 0 - APPROVED = 1 - REJECTED = 2 - - approver_account: str = proto.Field( - proto.STRING, - number=2, - ) - approval_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - decision: Decision = proto.Field( - proto.ENUM, - number=4, - enum=Decision, - ) - comment: str = proto.Field( - proto.STRING, - number=5, - ) - url: str = proto.Field( - proto.STRING, - number=6, - ) - - -class BuildTrigger(proto.Message): - r"""Configuration for an automated build in response to source - repository changes. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - resource_name (str): - The ``Trigger`` name with format: - ``projects/{project}/locations/{location}/triggers/{trigger}``, - where {trigger} is a unique identifier generated by the - service. - id (str): - Output only. Unique identifier of the - trigger. - description (str): - Human-readable description of this trigger. - name (str): - User-assigned name of the trigger. Must be - unique within the project. Trigger names must - meet the following requirements: - + They must contain only alphanumeric characters - and dashes. + They can be 1-64 characters long. - + They must begin and end with an alphanumeric - character. - tags (MutableSequence[str]): - Tags for annotation of a ``BuildTrigger`` - trigger_template (google.cloud.devtools.cloudbuild_v1.types.RepoSource): - Template describing the types of source changes to trigger a - build. - - Branch and tag names in trigger templates are interpreted as - regular expressions. Any branch or tag change that matches - that regular expression will trigger a build. - - Mutually exclusive with ``github``. - github (google.cloud.devtools.cloudbuild_v1.types.GitHubEventsConfig): - GitHubEventsConfig describes the configuration of a trigger - that creates a build whenever a GitHub event is received. - - Mutually exclusive with ``trigger_template``. - pubsub_config (google.cloud.devtools.cloudbuild_v1.types.PubsubConfig): - PubsubConfig describes the configuration of a - trigger that creates a build whenever a Pub/Sub - message is published. - webhook_config (google.cloud.devtools.cloudbuild_v1.types.WebhookConfig): - WebhookConfig describes the configuration of - a trigger that creates a build whenever a - webhook is sent to a trigger's webhook URL. 
- autodetect (bool): - Autodetect build configuration. The - following precedence is used (case insensitive): - 1. cloudbuild.yaml - 2. cloudbuild.yml - 3. cloudbuild.json - 4. Dockerfile - - Currently only available for GitHub App - Triggers. - - This field is a member of `oneof`_ ``build_template``. - build (google.cloud.devtools.cloudbuild_v1.types.Build): - Contents of the build template. - - This field is a member of `oneof`_ ``build_template``. - filename (str): - Path, from the source root, to the build - configuration file (i.e. cloudbuild.yaml). - - This field is a member of `oneof`_ ``build_template``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the trigger was - created. - disabled (bool): - If true, the trigger will never automatically - execute a build. - substitutions (MutableMapping[str, str]): - Substitutions for Build resource. The keys must match the - following regular expression: ``^_[A-Z0-9_]+$``. - ignored_files (MutableSequence[str]): - ignored_files and included_files are file glob matches using - https://golang.org/pkg/path/filepath/#Match extended with - support for "**". - - If ignored_files and changed files are both empty, then they - are not used to determine whether or not to trigger a build. - - If ignored_files is not empty, then we ignore any files that - match any of the ignored_file globs. If the change has no - files that are outside of the ignored_files globs, then we - do not trigger a build. - included_files (MutableSequence[str]): - If any of the files altered in the commit pass the - ignored_files filter and included_files is empty, then as - far as this filter is concerned, we should trigger the - build. - - If any of the files altered in the commit pass the - ignored_files filter and included_files is not empty, then - we make sure that at least one of those files matches a - included_files glob. If not, then we do not trigger a build. - filter (str): - Optional. 
A Common Expression Language - string. - service_account (str): - The service account used for all user-controlled operations - including UpdateBuildTrigger, RunBuildTrigger, CreateBuild, - and CancelBuild. If no service account is set, then the - standard Cloud Build service account - ([PROJECT_NUM]@system.gserviceaccount.com) will be used - instead. Format: - ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}`` - repository_event_config (google.cloud.devtools.cloudbuild_v1.types.RepositoryEventConfig): - The configuration of a trigger that creates a - build whenever an event from Repo API is - received. - """ - - resource_name: str = proto.Field( - proto.STRING, - number=34, - ) - id: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=10, - ) - name: str = proto.Field( - proto.STRING, - number=21, - ) - tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=19, - ) - trigger_template: 'RepoSource' = proto.Field( - proto.MESSAGE, - number=7, - message='RepoSource', - ) - github: 'GitHubEventsConfig' = proto.Field( - proto.MESSAGE, - number=13, - message='GitHubEventsConfig', - ) - pubsub_config: 'PubsubConfig' = proto.Field( - proto.MESSAGE, - number=29, - message='PubsubConfig', - ) - webhook_config: 'WebhookConfig' = proto.Field( - proto.MESSAGE, - number=31, - message='WebhookConfig', - ) - autodetect: bool = proto.Field( - proto.BOOL, - number=18, - oneof='build_template', - ) - build: 'Build' = proto.Field( - proto.MESSAGE, - number=4, - oneof='build_template', - message='Build', - ) - filename: str = proto.Field( - proto.STRING, - number=8, - oneof='build_template', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - disabled: bool = proto.Field( - proto.BOOL, - number=9, - ) - substitutions: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=11, - ) - 
ignored_files: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=15, - ) - included_files: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=16, - ) - filter: str = proto.Field( - proto.STRING, - number=30, - ) - service_account: str = proto.Field( - proto.STRING, - number=33, - ) - repository_event_config: 'RepositoryEventConfig' = proto.Field( - proto.MESSAGE, - number=39, - message='RepositoryEventConfig', - ) - - -class RepositoryEventConfig(proto.Message): - r"""The configuration of a trigger that creates a build whenever - an event from Repo API is received. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - repository (str): - The resource name of the Repo API resource. - repository_type (google.cloud.devtools.cloudbuild_v1.types.RepositoryEventConfig.RepositoryType): - Output only. The type of the SCM vendor the - repository points to. - pull_request (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter): - Filter to match changes in pull requests. - - This field is a member of `oneof`_ ``filter``. - push (google.cloud.devtools.cloudbuild_v1.types.PushFilter): - Filter to match changes in refs like - branches, tags. - - This field is a member of `oneof`_ ``filter``. - """ - class RepositoryType(proto.Enum): - r"""All possible SCM repo types from Repo API. - - Values: - REPOSITORY_TYPE_UNSPECIFIED (0): - If unspecified, RepositoryType defaults to - GITHUB. - GITHUB (1): - The SCM repo is GITHUB. - GITHUB_ENTERPRISE (2): - The SCM repo is GITHUB Enterprise. - GITLAB_ENTERPRISE (3): - The SCM repo is GITLAB Enterprise. 
- """ - REPOSITORY_TYPE_UNSPECIFIED = 0 - GITHUB = 1 - GITHUB_ENTERPRISE = 2 - GITLAB_ENTERPRISE = 3 - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - repository_type: RepositoryType = proto.Field( - proto.ENUM, - number=2, - enum=RepositoryType, - ) - pull_request: 'PullRequestFilter' = proto.Field( - proto.MESSAGE, - number=3, - oneof='filter', - message='PullRequestFilter', - ) - push: 'PushFilter' = proto.Field( - proto.MESSAGE, - number=4, - oneof='filter', - message='PushFilter', - ) - - -class GitHubEventsConfig(proto.Message): - r"""GitHubEventsConfig describes the configuration of a trigger - that creates a build whenever a GitHub event is received. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - installation_id (int): - The installationID that emits the GitHub - event. - owner (str): - Owner of the repository. For example: The - owner for - https://github.com/googlecloudplatform/cloud-builders - is "googlecloudplatform". - name (str): - Name of the repository. For example: The name - for - https://github.com/googlecloudplatform/cloud-builders - is "cloud-builders". - pull_request (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter): - filter to match changes in pull requests. - - This field is a member of `oneof`_ ``event``. - push (google.cloud.devtools.cloudbuild_v1.types.PushFilter): - filter to match changes in refs like - branches, tags. - - This field is a member of `oneof`_ ``event``. 
- """ - - installation_id: int = proto.Field( - proto.INT64, - number=1, - ) - owner: str = proto.Field( - proto.STRING, - number=6, - ) - name: str = proto.Field( - proto.STRING, - number=7, - ) - pull_request: 'PullRequestFilter' = proto.Field( - proto.MESSAGE, - number=4, - oneof='event', - message='PullRequestFilter', - ) - push: 'PushFilter' = proto.Field( - proto.MESSAGE, - number=5, - oneof='event', - message='PushFilter', - ) - - -class PubsubConfig(proto.Message): - r"""PubsubConfig describes the configuration of a trigger that - creates a build whenever a Pub/Sub message is published. - - Attributes: - subscription (str): - Output only. Name of the subscription. Format is - ``projects/{project}/subscriptions/{subscription}``. - topic (str): - The name of the topic from which this subscription is - receiving messages. Format is - ``projects/{project}/topics/{topic}``. - service_account_email (str): - Service account that will make the push - request. - state (google.cloud.devtools.cloudbuild_v1.types.PubsubConfig.State): - Potential issues with the underlying Pub/Sub - subscription configuration. Only populated on - get requests. - """ - class State(proto.Enum): - r"""Enumerates potential issues with the underlying Pub/Sub - subscription configuration. - - Values: - STATE_UNSPECIFIED (0): - The subscription configuration has not been - checked. - OK (1): - The Pub/Sub subscription is properly - configured. - SUBSCRIPTION_DELETED (2): - The subscription has been deleted. - TOPIC_DELETED (3): - The topic has been deleted. - SUBSCRIPTION_MISCONFIGURED (4): - Some of the subscription's field are - misconfigured. 
- """ - STATE_UNSPECIFIED = 0 - OK = 1 - SUBSCRIPTION_DELETED = 2 - TOPIC_DELETED = 3 - SUBSCRIPTION_MISCONFIGURED = 4 - - subscription: str = proto.Field( - proto.STRING, - number=1, - ) - topic: str = proto.Field( - proto.STRING, - number=2, - ) - service_account_email: str = proto.Field( - proto.STRING, - number=3, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - - -class WebhookConfig(proto.Message): - r"""WebhookConfig describes the configuration of a trigger that - creates a build whenever a webhook is sent to a trigger's - webhook URL. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - secret (str): - Required. Resource name for the secret - required as a URL parameter. - - This field is a member of `oneof`_ ``auth_method``. - state (google.cloud.devtools.cloudbuild_v1.types.WebhookConfig.State): - Potential issues with the underlying Pub/Sub - subscription configuration. Only populated on - get requests. - """ - class State(proto.Enum): - r"""Enumerates potential issues with the Secret Manager secret - provided by the user. - - Values: - STATE_UNSPECIFIED (0): - The webhook auth configuration not been - checked. - OK (1): - The auth configuration is properly setup. - SECRET_DELETED (2): - The secret provided in auth_method has been deleted. - """ - STATE_UNSPECIFIED = 0 - OK = 1 - SECRET_DELETED = 2 - - secret: str = proto.Field( - proto.STRING, - number=3, - oneof='auth_method', - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - - -class PullRequestFilter(proto.Message): - r"""PullRequestFilter contains filter properties for matching - GitHub Pull Requests. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - branch (str): - Regex of branches to match. 
- The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``git_ref``. - comment_control (google.cloud.devtools.cloudbuild_v1.types.PullRequestFilter.CommentControl): - Configure builds to run whether a repository owner or - collaborator need to comment ``/gcbrun``. - invert_regex (bool): - If true, branches that do NOT match the git_ref will trigger - a build. - """ - class CommentControl(proto.Enum): - r"""Controls behavior of Pull Request comments. - - Values: - COMMENTS_DISABLED (0): - Do not require comments on Pull Requests - before builds are triggered. - COMMENTS_ENABLED (1): - Enforce that repository owners or - collaborators must comment on Pull Requests - before builds are triggered. - COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY (2): - Enforce that repository owners or - collaborators must comment on external - contributors' Pull Requests before builds are - triggered. - """ - COMMENTS_DISABLED = 0 - COMMENTS_ENABLED = 1 - COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY = 2 - - branch: str = proto.Field( - proto.STRING, - number=2, - oneof='git_ref', - ) - comment_control: CommentControl = proto.Field( - proto.ENUM, - number=5, - enum=CommentControl, - ) - invert_regex: bool = proto.Field( - proto.BOOL, - number=6, - ) - - -class PushFilter(proto.Message): - r"""Push contains filter properties for matching GitHub git - pushes. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - branch (str): - Regexes matching branches to build. 
- The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``git_ref``. - tag (str): - Regexes matching tags to build. - The syntax of the regular expressions accepted - is the syntax accepted by RE2 and described at - https://github.com/google/re2/wiki/Syntax - - This field is a member of `oneof`_ ``git_ref``. - invert_regex (bool): - When true, only trigger a build if the revision regex does - NOT match the git_ref regex. - """ - - branch: str = proto.Field( - proto.STRING, - number=2, - oneof='git_ref', - ) - tag: str = proto.Field( - proto.STRING, - number=3, - oneof='git_ref', - ) - invert_regex: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class CreateBuildTriggerRequest(proto.Message): - r"""Request to create a new ``BuildTrigger``. - - Attributes: - parent (str): - The parent resource where this trigger will be created. - Format: ``projects/{project}/locations/{location}`` - project_id (str): - Required. ID of the project for which to - configure automatic builds. - trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): - Required. ``BuildTrigger`` to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger: 'BuildTrigger' = proto.Field( - proto.MESSAGE, - number=2, - message='BuildTrigger', - ) - - -class GetBuildTriggerRequest(proto.Message): - r"""Returns the ``BuildTrigger`` with the specified ID. - - Attributes: - name (str): - The name of the ``Trigger`` to retrieve. Format: - ``projects/{project}/locations/{location}/triggers/{trigger}`` - project_id (str): - Required. ID of the project that owns the - trigger. - trigger_id (str): - Required. Identifier (``id`` or ``name``) of the - ``BuildTrigger`` to get. 
- """ - - name: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListBuildTriggersRequest(proto.Message): - r"""Request to list existing ``BuildTriggers``. - - Attributes: - parent (str): - The parent of the collection of ``Triggers``. Format: - ``projects/{project}/locations/{location}`` - project_id (str): - Required. ID of the project for which to list - BuildTriggers. - page_size (int): - Number of results to return in the list. - page_token (str): - Token to provide to skip to a particular spot - in the list. - """ - - parent: str = proto.Field( - proto.STRING, - number=4, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListBuildTriggersResponse(proto.Message): - r"""Response containing existing ``BuildTriggers``. - - Attributes: - triggers (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildTrigger]): - ``BuildTriggers`` for the project, sorted by ``create_time`` - descending. - next_page_token (str): - Token to receive the next page of results. - """ - - @property - def raw_page(self): - return self - - triggers: MutableSequence['BuildTrigger'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BuildTrigger', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteBuildTriggerRequest(proto.Message): - r"""Request to delete a ``BuildTrigger``. - - Attributes: - name (str): - The name of the ``Trigger`` to delete. Format: - ``projects/{project}/locations/{location}/triggers/{trigger}`` - project_id (str): - Required. ID of the project that owns the - trigger. - trigger_id (str): - Required. ID of the ``BuildTrigger`` to delete. 
- """ - - name: str = proto.Field( - proto.STRING, - number=3, - ) - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateBuildTriggerRequest(proto.Message): - r"""Request to update an existing ``BuildTrigger``. - - Attributes: - project_id (str): - Required. ID of the project that owns the - trigger. - trigger_id (str): - Required. ID of the ``BuildTrigger`` to update. - trigger (google.cloud.devtools.cloudbuild_v1.types.BuildTrigger): - Required. ``BuildTrigger`` to update. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - trigger_id: str = proto.Field( - proto.STRING, - number=2, - ) - trigger: 'BuildTrigger' = proto.Field( - proto.MESSAGE, - number=3, - message='BuildTrigger', - ) - - -class BuildOptions(proto.Message): - r"""Optional arguments to enable specific features of builds. - - Attributes: - source_provenance_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Hash.HashType]): - Requested hash for SourceProvenance. - requested_verify_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.VerifyOption): - Requested verifiability options. - machine_type (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.MachineType): - Compute Engine machine type on which to run - the build. - disk_size_gb (int): - Requested disk size for the VM that runs the build. Note - that this is *NOT* "disk free"; some of the space will be - used by the operating system and build utilities. Also note - that this is the minimum disk size that will be allocated - for the build -- the build may run with a larger disk than - requested. At present, the maximum disk size is 2000GB; - builds that request more than the maximum are rejected with - an error. - substitution_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.SubstitutionOption): - Option to specify behavior when there is an error in the - substitution checks. 
- - NOTE: this is always set to ALLOW_LOOSE for triggered builds - and cannot be overridden in the build configuration file. - dynamic_substitutions (bool): - Option to specify whether or not to apply - bash style string operations to the - substitutions. - NOTE: this is always enabled for triggered - builds and cannot be overridden in the build - configuration file. - log_streaming_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LogStreamingOption): - Option to define build log streaming behavior - to Cloud Storage. - worker_pool (str): - This field deprecated; please use ``pool.name`` instead. - pool (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.PoolOption): - Optional. Specification for execution on a ``WorkerPool``. - - See `running builds in a private - pool `__ - for more information. - logging (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LoggingMode): - Option to specify the logging mode, which - determines if and where build logs are stored. - env (MutableSequence[str]): - A list of global environment variable - definitions that will exist for all build steps - in this build. If a variable is defined in both - globally and in a build step, the variable will - use the build step value. - The elements are of the form "KEY=VALUE" for the - environment variable "KEY" being given the value - "VALUE". - secret_env (MutableSequence[str]): - A list of global environment variables, which are encrypted - using a Cloud Key Management Service crypto key. These - values must be specified in the build's ``Secret``. These - variables will be available to all build steps in this - build. - volumes (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Volume]): - Global list of volumes to mount for ALL build - steps - Each volume is created as an empty volume prior - to starting the build process. Upon completion - of the build, volumes and their contents are - discarded. 
Global volume names and paths cannot - conflict with the volumes defined a build step. - - Using a global volume in a build with only one - step is not valid as it is indicative of a build - request with an incorrect configuration. - default_logs_bucket_behavior (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.DefaultLogsBucketBehavior): - Optional. Option to specify how default logs - buckets are setup. - """ - class VerifyOption(proto.Enum): - r"""Specifies the manner in which the build should be verified, if at - all. - - If a verified build is requested, and any part of the process to - generate and upload provenance fails, the build will also fail. - - If the build does not request verification then that process may - occur, but is not guaranteed to. If it does occur and fails, the - build will not fail. - - For more information, see `Viewing Build - Provenance `__. - - Values: - NOT_VERIFIED (0): - Not a verifiable build (the default). - VERIFIED (1): - Build must be verified. - """ - NOT_VERIFIED = 0 - VERIFIED = 1 - - class MachineType(proto.Enum): - r"""Supported Compute Engine machine types. For more information, see - `Machine - types `__. - - Values: - UNSPECIFIED (0): - Standard machine type. - N1_HIGHCPU_8 (1): - Highcpu machine with 8 CPUs. - N1_HIGHCPU_32 (2): - Highcpu machine with 32 CPUs. - E2_HIGHCPU_8 (5): - Highcpu e2 machine with 8 CPUs. - E2_HIGHCPU_32 (6): - Highcpu e2 machine with 32 CPUs. - E2_MEDIUM (7): - E2 machine with 1 CPU. - """ - UNSPECIFIED = 0 - N1_HIGHCPU_8 = 1 - N1_HIGHCPU_32 = 2 - E2_HIGHCPU_8 = 5 - E2_HIGHCPU_32 = 6 - E2_MEDIUM = 7 - - class SubstitutionOption(proto.Enum): - r"""Specifies the behavior when there is an error in the - substitution checks. - - Values: - MUST_MATCH (0): - Fails the build if error in substitutions - checks, like missing a substitution in the - template or in the map. - ALLOW_LOOSE (1): - Do not fail the build if error in - substitutions checks. 
- """ - MUST_MATCH = 0 - ALLOW_LOOSE = 1 - - class LogStreamingOption(proto.Enum): - r"""Specifies the behavior when writing build logs to Cloud - Storage. - - Values: - STREAM_DEFAULT (0): - Service may automatically determine build log - streaming behavior. - STREAM_ON (1): - Build logs should be streamed to Cloud - Storage. - STREAM_OFF (2): - Build logs should not be streamed to Cloud - Storage; they will be written when the build is - completed. - """ - STREAM_DEFAULT = 0 - STREAM_ON = 1 - STREAM_OFF = 2 - - class LoggingMode(proto.Enum): - r"""Specifies the logging mode. - - Values: - LOGGING_UNSPECIFIED (0): - The service determines the logging mode. The default is - ``LEGACY``. Do not rely on the default logging behavior as - it may change in the future. - LEGACY (1): - Build logs are stored in Cloud Logging and - Cloud Storage. - GCS_ONLY (2): - Build logs are stored in Cloud Storage. - STACKDRIVER_ONLY (3): - This option is the same as CLOUD_LOGGING_ONLY. - CLOUD_LOGGING_ONLY (5): - Build logs are stored in Cloud Logging. Selecting this - option will not allow `logs - streaming `__. - NONE (4): - Turn off all logging. No build logs will be - captured. - """ - LOGGING_UNSPECIFIED = 0 - LEGACY = 1 - GCS_ONLY = 2 - STACKDRIVER_ONLY = 3 - CLOUD_LOGGING_ONLY = 5 - NONE = 4 - - class DefaultLogsBucketBehavior(proto.Enum): - r"""Default GCS log bucket behavior options. - - Values: - DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED (0): - Unspecified. - REGIONAL_USER_OWNED_BUCKET (1): - Bucket is located in user-owned project in - the same region as the build. The builder - service account must have access to create and - write to GCS buckets in the build project. - """ - DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED = 0 - REGIONAL_USER_OWNED_BUCKET = 1 - - class PoolOption(proto.Message): - r"""Details about how a build should be executed on a ``WorkerPool``. - - See `running builds in a private - pool `__ - for more information. 
- - Attributes: - name (str): - The ``WorkerPool`` resource to execute the build on. You - must have ``cloudbuild.workerpools.use`` on the project - hosting the WorkerPool. - - Format - projects/{project}/locations/{location}/workerPools/{workerPoolId} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - source_provenance_hash: MutableSequence['Hash.HashType'] = proto.RepeatedField( - proto.ENUM, - number=1, - enum='Hash.HashType', - ) - requested_verify_option: VerifyOption = proto.Field( - proto.ENUM, - number=2, - enum=VerifyOption, - ) - machine_type: MachineType = proto.Field( - proto.ENUM, - number=3, - enum=MachineType, - ) - disk_size_gb: int = proto.Field( - proto.INT64, - number=6, - ) - substitution_option: SubstitutionOption = proto.Field( - proto.ENUM, - number=4, - enum=SubstitutionOption, - ) - dynamic_substitutions: bool = proto.Field( - proto.BOOL, - number=17, - ) - log_streaming_option: LogStreamingOption = proto.Field( - proto.ENUM, - number=5, - enum=LogStreamingOption, - ) - worker_pool: str = proto.Field( - proto.STRING, - number=7, - ) - pool: PoolOption = proto.Field( - proto.MESSAGE, - number=19, - message=PoolOption, - ) - logging: LoggingMode = proto.Field( - proto.ENUM, - number=11, - enum=LoggingMode, - ) - env: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=12, - ) - secret_env: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=13, - ) - volumes: MutableSequence['Volume'] = proto.RepeatedField( - proto.MESSAGE, - number=14, - message='Volume', - ) - default_logs_bucket_behavior: DefaultLogsBucketBehavior = proto.Field( - proto.ENUM, - number=21, - enum=DefaultLogsBucketBehavior, - ) - - -class ReceiveTriggerWebhookRequest(proto.Message): - r"""ReceiveTriggerWebhookRequest [Experimental] is the request object - accepted by the ReceiveTriggerWebhook method. - - Attributes: - name (str): - The name of the ``ReceiveTriggerWebhook`` to retrieve. 
- Format: - ``projects/{project}/locations/{location}/triggers/{trigger}`` - body (google.api.httpbody_pb2.HttpBody): - HTTP request body. - project_id (str): - Project in which the specified trigger lives - trigger (str): - Name of the trigger to run the payload - against - secret (str): - Secret token used for authorization if an - OAuth token isn't provided. - """ - - name: str = proto.Field( - proto.STRING, - number=5, - ) - body: httpbody_pb2.HttpBody = proto.Field( - proto.MESSAGE, - number=1, - message=httpbody_pb2.HttpBody, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - trigger: str = proto.Field( - proto.STRING, - number=3, - ) - secret: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ReceiveTriggerWebhookResponse(proto.Message): - r"""ReceiveTriggerWebhookResponse [Experimental] is the response object - for the ReceiveTriggerWebhook method. - - """ - - -class WorkerPool(proto.Message): - r"""Configuration for a ``WorkerPool``. - - Cloud Build owns and maintains a pool of workers for general use and - have no access to a project's private network. By default, builds - submitted to Cloud Build will use a worker from this pool. - - If your build needs access to resources on a private network, create - and use a ``WorkerPool`` to run your builds. Private - ``WorkerPool``\ s give your builds access to any single VPC network - that you administer, including any on-prem resources connected to - that VPC network. For an overview of private pools, see `Private - pools - overview `__. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The resource name of the ``WorkerPool``, with - format - ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. 
- The value of ``{worker_pool}`` is provided by - ``worker_pool_id`` in ``CreateWorkerPool`` request and the - value of ``{location}`` is determined by the endpoint - accessed. - display_name (str): - A user-specified, human-readable name for the - ``WorkerPool``. If provided, this value must be 1-63 - characters. - uid (str): - Output only. A unique identifier for the ``WorkerPool``. - annotations (MutableMapping[str, str]): - User specified annotations. See - https://google.aip.dev/128#annotations - for more details such as format and size - limitations. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which the request to create the - ``WorkerPool`` was received. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which the request to update the - ``WorkerPool`` was received. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time at which the request to delete the - ``WorkerPool`` was received. - state (google.cloud.devtools.cloudbuild_v1.types.WorkerPool.State): - Output only. ``WorkerPool`` state. - private_pool_v1_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config): - Legacy Private Pool configuration. - - This field is a member of `oneof`_ ``config``. - etag (str): - Output only. Checksum computed by the server. - May be sent on update and delete requests to - ensure that the client has an up-to-date value - before proceeding. - """ - class State(proto.Enum): - r"""State of the ``WorkerPool``. - - Values: - STATE_UNSPECIFIED (0): - State of the ``WorkerPool`` is unknown. - CREATING (1): - ``WorkerPool`` is being created. - RUNNING (2): - ``WorkerPool`` is running. - DELETING (3): - ``WorkerPool`` is being deleted: cancelling builds and - draining workers. - DELETED (4): - ``WorkerPool`` is deleted. - UPDATING (5): - ``WorkerPool`` is being updated; new builds cannot be run. 
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - RUNNING = 2 - DELETING = 3 - DELETED = 4 - UPDATING = 5 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - uid: str = proto.Field( - proto.STRING, - number=3, - ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=8, - enum=State, - ) - private_pool_v1_config: 'PrivatePoolV1Config' = proto.Field( - proto.MESSAGE, - number=12, - oneof='config', - message='PrivatePoolV1Config', - ) - etag: str = proto.Field( - proto.STRING, - number=11, - ) - - -class PrivatePoolV1Config(proto.Message): - r"""Configuration for a V1 ``PrivatePool``. - - Attributes: - worker_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.WorkerConfig): - Machine configuration for the workers in the - pool. - network_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig): - Network configuration for the pool. - """ - - class WorkerConfig(proto.Message): - r"""Defines the configuration to be used for creating workers in - the pool. - - Attributes: - machine_type (str): - Machine type of a worker, such as ``e2-medium``. See `Worker - pool config - file `__. - If left blank, Cloud Build will use a sensible default. - disk_size_gb (int): - Size of the disk attached to the worker, in GB. See `Worker - pool config - file `__. - Specify a value of up to 2000. If ``0`` is specified, Cloud - Build will use a standard disk size. 
- """ - - machine_type: str = proto.Field( - proto.STRING, - number=1, - ) - disk_size_gb: int = proto.Field( - proto.INT64, - number=2, - ) - - class NetworkConfig(proto.Message): - r"""Defines the network configuration for the pool. - - Attributes: - peered_network (str): - Required. Immutable. The network definition that the workers - are peered to. If this section is left empty, the workers - will be peered to ``WorkerPool.project_id`` on the service - producer network. Must be in the format - ``projects/{project}/global/networks/{network}``, where - ``{project}`` is a project number, such as ``12345``, and - ``{network}`` is the name of a VPC network in the project. - See `Understanding network configuration - options `__ - egress_option (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config.NetworkConfig.EgressOption): - Option to configure network egress for the - workers. - peered_network_ip_range (str): - Immutable. Subnet IP range within the peered network. This - is specified in CIDR notation with a slash and the subnet - prefix size. You can optionally specify an IP address before - the subnet prefix value. e.g. ``192.168.0.0/29`` would - specify an IP range starting at 192.168.0.0 with a prefix - size of 29 bits. ``/16`` would specify a prefix size of 16 - bits, with an automatically determined IP within the peered - VPC. If unspecified, a value of ``/24`` will be used. - """ - class EgressOption(proto.Enum): - r"""Defines the egress option for the pool. - - Values: - EGRESS_OPTION_UNSPECIFIED (0): - If set, defaults to PUBLIC_EGRESS. - NO_PUBLIC_EGRESS (1): - If set, workers are created without any - public address, which prevents network egress to - public IPs unless a network proxy is configured. - PUBLIC_EGRESS (2): - If set, workers are created with a public - address which allows for public internet egress. 
- """ - EGRESS_OPTION_UNSPECIFIED = 0 - NO_PUBLIC_EGRESS = 1 - PUBLIC_EGRESS = 2 - - peered_network: str = proto.Field( - proto.STRING, - number=1, - ) - egress_option: 'PrivatePoolV1Config.NetworkConfig.EgressOption' = proto.Field( - proto.ENUM, - number=2, - enum='PrivatePoolV1Config.NetworkConfig.EgressOption', - ) - peered_network_ip_range: str = proto.Field( - proto.STRING, - number=3, - ) - - worker_config: WorkerConfig = proto.Field( - proto.MESSAGE, - number=1, - message=WorkerConfig, - ) - network_config: NetworkConfig = proto.Field( - proto.MESSAGE, - number=2, - message=NetworkConfig, - ) - - -class CreateWorkerPoolRequest(proto.Message): - r"""Request to create a new ``WorkerPool``. - - Attributes: - parent (str): - Required. The parent resource where this worker pool will be - created. Format: - ``projects/{project}/locations/{location}``. - worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): - Required. ``WorkerPool`` resource to create. - worker_pool_id (str): - Required. Immutable. The ID to use for the ``WorkerPool``, - which will become the final component of the resource name. - - This value should be 1-63 characters, and valid characters - are /[a-z][0-9]-/. - validate_only (bool): - If set, validate the request and preview the - response, but do not actually post it. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - worker_pool: 'WorkerPool' = proto.Field( - proto.MESSAGE, - number=2, - message='WorkerPool', - ) - worker_pool_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class GetWorkerPoolRequest(proto.Message): - r"""Request to get a ``WorkerPool`` with the specified name. - - Attributes: - name (str): - Required. The name of the ``WorkerPool`` to retrieve. - Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteWorkerPoolRequest(proto.Message): - r"""Request to delete a ``WorkerPool``. - - Attributes: - name (str): - Required. The name of the ``WorkerPool`` to delete. Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - etag (str): - Optional. If provided, it must match the - server's etag on the workerpool for the request - to be processed. - allow_missing (bool): - If set to true, and the ``WorkerPool`` is not found, the - request will succeed but no action will be taken on the - server. - validate_only (bool): - If set, validate the request and preview the - response, but do not actually post it. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateWorkerPoolRequest(proto.Message): - r"""Request to update a ``WorkerPool``. - - Attributes: - worker_pool (google.cloud.devtools.cloudbuild_v1.types.WorkerPool): - Required. The ``WorkerPool`` to update. - - The ``name`` field is used to identify the ``WorkerPool`` to - update. Format: - ``projects/{project}/locations/{location}/workerPools/{workerPool}``. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask specifying which fields in ``worker_pool`` to update. - validate_only (bool): - If set, validate the request and preview the - response, but do not actually post it. - """ - - worker_pool: 'WorkerPool' = proto.Field( - proto.MESSAGE, - number=1, - message='WorkerPool', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class ListWorkerPoolsRequest(proto.Message): - r"""Request to list ``WorkerPool``\ s. 
- - Attributes: - parent (str): - Required. The parent of the collection of ``WorkerPools``. - Format: ``projects/{project}/locations/{location}``. - page_size (int): - The maximum number of ``WorkerPool``\ s to return. The - service may return fewer than this value. If omitted, the - server will use a sensible default. - page_token (str): - A page token, received from a previous ``ListWorkerPools`` - call. Provide this to retrieve the subsequent page. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListWorkerPoolsResponse(proto.Message): - r"""Response containing existing ``WorkerPools``. - - Attributes: - worker_pools (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.WorkerPool]): - ``WorkerPools`` for the specified project. - next_page_token (str): - Continuation token used to page through large - result sets. Provide this value in a subsequent - ListWorkerPoolsRequest to return the next page - of results. - """ - - @property - def raw_page(self): - return self - - worker_pools: MutableSequence['WorkerPool'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='WorkerPool', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateWorkerPoolOperationMetadata(proto.Message): - r"""Metadata for the ``CreateWorkerPool`` operation. - - Attributes: - worker_pool (str): - The resource name of the ``WorkerPool`` to create. Format: - ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was created. - complete_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was completed. 
- """ - - worker_pool: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - complete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateWorkerPoolOperationMetadata(proto.Message): - r"""Metadata for the ``UpdateWorkerPool`` operation. - - Attributes: - worker_pool (str): - The resource name of the ``WorkerPool`` being updated. - Format: - ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was created. - complete_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was completed. - """ - - worker_pool: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - complete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class DeleteWorkerPoolOperationMetadata(proto.Message): - r"""Metadata for the ``DeleteWorkerPool`` operation. - - Attributes: - worker_pool (str): - The resource name of the ``WorkerPool`` being deleted. - Format: - ``projects/{project}/locations/{location}/workerPools/{worker_pool}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was created. - complete_time (google.protobuf.timestamp_pb2.Timestamp): - Time the operation was completed. 
- """ - - worker_pool: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - complete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index e09b880c..00000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/devtools/cloudbuild_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py deleted file mode 100644 index a8280c5e..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApproveBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ApproveBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_approve_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ApproveBuildRequest( - name="name_value", - ) - - # Make the request - operation = client.approve_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ApproveBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py deleted file mode 100644 index e90be4b0..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_approve_build_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApproveBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_approve_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ApproveBuildRequest( - name="name_value", - ) - - # Make the request - operation = client.approve_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py deleted file mode 100644 index 73320372..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CancelBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_cancel_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CancelBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = await client.cancel_build(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CancelBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py deleted file mode 100644 index 656b5d59..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_cancel_build_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CancelBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_cancel_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CancelBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = client.cancel_build(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CancelBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py deleted file mode 100644 index 07750a37..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_create_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateBuildRequest( - project_id="project_id_value", - ) - - # Make the request - operation = client.create_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py deleted file mode 100644 index 173aea57..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_create_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateBuildRequest( - project_id="project_id_value", - ) - - # Make the request - operation = client.create_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py deleted file mode 100644 index 9fe3fcdf..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_create_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.CreateBuildTriggerRequest( - project_id="project_id_value", - trigger=trigger, - ) - - # Make the request - response = await client.create_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py deleted file mode 100644 index 8ddcd2f3..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_create_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.CreateBuildTriggerRequest( - project_id="project_id_value", - trigger=trigger, - ) - - # Make the request - response = client.create_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py deleted file mode 100644 index cd0a773b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_create_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateWorkerPoolRequest( - parent="parent_value", - worker_pool_id="worker_pool_id_value", - ) - - # Make the request - operation = client.create_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py deleted file mode 100644 index 80396e7e..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_create_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.CreateWorkerPoolRequest( - parent="parent_value", - worker_pool_id="worker_pool_id_value", - ) - - # Make the request - operation = client.create_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py deleted file mode 100644 index 62955bcc..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_delete_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - await client.delete_build_trigger(request=request) - - -# [END cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py deleted file mode 100644 index 249ba150..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_delete_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - client.delete_build_trigger(request=request) - - -# [END cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py deleted file mode 100644 index 257fa9ba..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_delete_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteWorkerPoolRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py deleted file mode 100644 index a2b9f632..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 
2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_delete_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.DeleteWorkerPoolRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py deleted file mode 100644 index 585bce61..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_get_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = await client.get_build(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py deleted file mode 100644 index d767fe6c..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_get_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - response = client.get_build(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py deleted file mode 100644 index 373b419b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_get_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - response = await client.get_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py deleted file mode 100644 index f2dd1102..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_get_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - response = client.get_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py deleted file mode 100644 index 1ad3016f..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_get_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetWorkerPoolRequest( - name="name_value", - ) - - # Make the request - response = await client.get_worker_pool(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py deleted file mode 100644 index fd50d2fd..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_get_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.GetWorkerPoolRequest( - name="name_value", - ) - - # Make the request - response = client.get_worker_pool(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py deleted file mode 100644 index 43b21efa..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBuildTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_list_build_triggers(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildTriggersRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_build_triggers(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py deleted file mode 100644 index 86f6e1c1..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may 
not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBuildTriggers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_list_build_triggers(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildTriggersRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_build_triggers(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py deleted file mode 100644 index 30ad36a2..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBuilds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListBuilds_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_list_builds(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildsRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_builds(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListBuilds_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py deleted file mode 100644 index 9c2813c3..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_builds_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBuilds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListBuilds_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_list_builds(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListBuildsRequest( - project_id="project_id_value", - ) - - # Make the request - page_result = client.list_builds(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListBuilds_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py deleted file mode 100644 index 378636ad..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListWorkerPools -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_list_worker_pools(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListWorkerPoolsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_worker_pools(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py deleted file mode 100644 index d23cdb76..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListWorkerPools -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_list_worker_pools(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ListWorkerPoolsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_worker_pools(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py deleted file mode 100644 index 133c477b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReceiveTriggerWebhook -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_receive_trigger_webhook(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ReceiveTriggerWebhookRequest( - ) - - # Make the request - response = await client.receive_trigger_webhook(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py deleted file mode 100644 index 839f241c..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReceiveTriggerWebhook -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_receive_trigger_webhook(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.ReceiveTriggerWebhookRequest( - ) - - # Make the request - response = client.receive_trigger_webhook(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py deleted file mode 100644 index 8c671273..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RetryBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_RetryBuild_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_retry_build(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RetryBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - operation = client.retry_build(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_RetryBuild_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py deleted file mode 100644 index 6b1d79f0..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_retry_build_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you 
may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RetryBuild -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_RetryBuild_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_retry_build(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RetryBuildRequest( - project_id="project_id_value", - id="id_value", - ) - - # Make the request - operation = client.retry_build(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_RetryBuild_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py deleted file mode 100644 index 1c33cfb2..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_run_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RunBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - operation = client.run_build_trigger(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py deleted file mode 100644 index 78b1a643..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_run_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.RunBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - ) - - # Make the request - operation = client.run_build_trigger(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py deleted file mode 100644 index 46d6ea7f..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_update_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.UpdateBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=trigger, - ) - - # Make the request - response = await client.update_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py deleted file mode 100644 index 4022a4e4..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBuildTrigger -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_update_build_trigger(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - trigger = cloudbuild_v1.BuildTrigger() - trigger.autodetect = True - - request = cloudbuild_v1.UpdateBuildTriggerRequest( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=trigger, - ) - - # Make the request - response = client.update_build_trigger(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py deleted file mode 100644 index 4152140a..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -async def sample_update_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v1.UpdateWorkerPoolRequest( - ) - - # Make the request - operation = client.update_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py b/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py deleted file mode 100644 index b7bab1b1..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateWorkerPool -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v1 - - -def sample_update_worker_pool(): - # Create a client - client = cloudbuild_v1.CloudBuildClient() - - # Initialize request argument(s) - request = cloudbuild_v1.UpdateWorkerPoolRequest( - ) - - # Make the request - operation = client.update_worker_pool(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json deleted file mode 100644 index e379efab..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ /dev/null @@ -1,3027 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.devtools.cloudbuild.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-build", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.approve_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ApproveBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ApproveBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": 
"approval_result", - "type": "google.cloud.devtools.cloudbuild_v1.types.ApprovalResult" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "approve_build" - }, - "description": "Sample for ApproveBuild", - "file": "cloudbuild_v1_generated_cloud_build_approve_build_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ApproveBuild_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_approve_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.approve_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ApproveBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ApproveBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ApproveBuildRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "approval_result", - "type": "google.cloud.devtools.cloudbuild_v1.types.ApprovalResult" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", 
- "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "approve_build" - }, - "description": "Sample for ApproveBuild", - "file": "cloudbuild_v1_generated_cloud_build_approve_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ApproveBuild_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_approve_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.cancel_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CancelBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", - "shortName": "cancel_build" - }, - "description": "Sample for CancelBuild", - "file": "cloudbuild_v1_generated_cloud_build_cancel_build_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CancelBuild_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_cancel_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.cancel_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CancelBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CancelBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", - "shortName": "cancel_build" - }, - "description": "Sample for CancelBuild", - "file": "cloudbuild_v1_generated_cloud_build_cancel_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CancelBuild_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - 
}, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_cancel_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger", - "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "create_build_trigger" - }, - "description": "Sample for CreateBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger", - "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "create_build_trigger" - }, - "description": "Sample for CreateBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuildTrigger_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_build_trigger_sync.py" - }, - { - "canonical": true, - 
"clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "build", - "type": "google.cloud.devtools.cloudbuild_v1.types.Build" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_build" - }, - "description": "Sample for CreateBuild", - "file": "cloudbuild_v1_generated_cloud_build_create_build_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuild_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_build", 
- "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "build", - "type": "google.cloud.devtools.cloudbuild_v1.types.Build" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_build" - }, - "description": "Sample for CreateBuild", - "file": "cloudbuild_v1_generated_cloud_build_create_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateBuild_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.create_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateWorkerPool" - 
}, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "worker_pool", - "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" - }, - { - "name": "worker_pool_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_worker_pool" - }, - "description": "Sample for CreateWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_worker_pool_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.create_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "CreateWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.CreateWorkerPoolRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "worker_pool", - "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" - }, - { - "name": "worker_pool_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_worker_pool" - }, - "description": "Sample for CreateWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_CreateWorkerPool_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_create_worker_pool_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.delete_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "DeleteBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_build_trigger" - }, - "description": "Sample for DeleteBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.delete_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "DeleteBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_build_trigger" - }, - "description": "Sample for DeleteBuildTrigger", - "file": 
"cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteBuildTrigger_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_delete_build_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.delete_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "DeleteWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_worker_pool" - }, - "description": "Sample for DeleteWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.delete_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "DeleteWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.DeleteWorkerPoolRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_worker_pool" - }, - "description": "Sample for DeleteWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_DeleteWorkerPool_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "cloudbuild_v1_generated_cloud_build_delete_worker_pool_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "get_build_trigger" - }, - "description": "Sample for GetBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - 
"shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "get_build_trigger" - }, - "description": "Sample for GetBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuildTrigger_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_build_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuild", - "service": { - 
"fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", - "shortName": "get_build" - }, - "description": "Sample for GetBuild", - "file": "cloudbuild_v1_generated_cloud_build_get_build_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuild_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - 
{ - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.Build", - "shortName": "get_build" - }, - "description": "Sample for GetBuild", - "file": "cloudbuild_v1_generated_cloud_build_get_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetBuild_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool", - "shortName": "get_worker_pool" - }, - "description": "Sample for 
GetWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetWorkerPool_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_worker_pool_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "GetWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.GetWorkerPoolRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool", - "shortName": "get_worker_pool" - }, - "description": "Sample for GetWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_GetWorkerPool_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - 
"type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_get_worker_pool_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_build_triggers", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListBuildTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersAsyncPager", - "shortName": "list_build_triggers" - }, - "description": "Sample for ListBuildTriggers", - "file": "cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_build_triggers_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_build_triggers", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListBuildTriggers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildTriggersPager", - "shortName": "list_build_triggers" - }, - "description": "Sample for ListBuildTriggers", - "file": "cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuildTriggers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_build_triggers_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - 
"client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_builds", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListBuilds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsAsyncPager", - "shortName": "list_builds" - }, - "description": "Sample for ListBuilds", - "file": "cloudbuild_v1_generated_cloud_build_list_builds_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuilds_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_builds_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_builds", - "method": { - "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListBuilds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListBuildsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListBuildsPager", - "shortName": "list_builds" - }, - "description": "Sample for ListBuilds", - "file": "cloudbuild_v1_generated_cloud_build_list_builds_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListBuilds_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_builds_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.list_worker_pools", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListWorkerPools" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsAsyncPager", - "shortName": "list_worker_pools" - }, - "description": "Sample for ListWorkerPools", - "file": "cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListWorkerPools_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_worker_pools_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.list_worker_pools", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ListWorkerPools" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.services.cloud_build.pagers.ListWorkerPoolsPager", - "shortName": "list_worker_pools" - }, - "description": "Sample for ListWorkerPools", - "file": "cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ListWorkerPools_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_list_worker_pools_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.receive_trigger_webhook", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ReceiveTriggerWebhook", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ReceiveTriggerWebhook" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse", - "shortName": "receive_trigger_webhook" - }, - "description": "Sample for ReceiveTriggerWebhook", - 
"file": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.receive_trigger_webhook", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.ReceiveTriggerWebhook", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "ReceiveTriggerWebhook" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.ReceiveTriggerWebhookResponse", - "shortName": "receive_trigger_webhook" - }, - "description": "Sample for ReceiveTriggerWebhook", - "file": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_ReceiveTriggerWebhook_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": 
"FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_receive_trigger_webhook_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.retry_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "RetryBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "retry_build" - }, - "description": "Sample for RetryBuild", - "file": "cloudbuild_v1_generated_cloud_build_retry_build_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_RetryBuild_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_retry_build_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.retry_build", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "RetryBuild" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.RetryBuildRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "retry_build" - }, - "description": "Sample for RetryBuild", - "file": "cloudbuild_v1_generated_cloud_build_retry_build_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_RetryBuild_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_retry_build_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - 
"shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.run_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "RunBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "source", - "type": "google.cloud.devtools.cloudbuild_v1.types.RepoSource" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "run_build_trigger" - }, - "description": "Sample for RunBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_run_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.run_build_trigger", - "method": { - 
"fullName": "google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "RunBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.RunBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "source", - "type": "google.cloud.devtools.cloudbuild_v1.types.RepoSource" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "run_build_trigger" - }, - "description": "Sample for RunBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_RunBuildTrigger_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_run_build_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.update_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", - "service": { - "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "UpdateBuildTrigger" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "trigger", - "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "update_build_trigger" - }, - "description": "Sample for UpdateBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_update_build_trigger_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.update_build_trigger", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "UpdateBuildTrigger" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateBuildTriggerRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "trigger_id", - "type": "str" - }, - { - "name": "trigger", - "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger", - "shortName": "update_build_trigger" - }, - "description": "Sample for UpdateBuildTrigger", - "file": "cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateBuildTrigger_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_update_build_trigger_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", - "shortName": "CloudBuildAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.update_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "UpdateWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest" - }, - { - "name": "worker_pool", - "type": "google.cloud.devtools.cloudbuild_v1.types.WorkerPool" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_worker_pool" - }, - "description": "Sample for UpdateWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_update_worker_pool_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", - "shortName": "CloudBuildClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.update_worker_pool", - "method": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", - "service": { - "fullName": "google.devtools.cloudbuild.v1.CloudBuild", - "shortName": "CloudBuild" - }, - "shortName": "UpdateWorkerPool" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v1.types.UpdateWorkerPoolRequest" - }, - { - "name": "worker_pool", - "type": 
"google.cloud.devtools.cloudbuild_v1.types.WorkerPool" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_worker_pool" - }, - "description": "Sample for UpdateWorkerPool", - "file": "cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v1_generated_CloudBuild_UpdateWorkerPool_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v1_generated_cloud_build_update_worker_pool_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py deleted file mode 100644 index 78aabad9..00000000 --- a/owl-bot-staging/v1/scripts/fixup_cloudbuild_v1_keywords.py +++ /dev/null @@ -1,193 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class cloudbuildCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'approve_build': ('name', 'approval_result', ), - 'cancel_build': ('project_id', 'id', 'name', ), - 'create_build': ('project_id', 'build', 'parent', ), - 'create_build_trigger': ('project_id', 'trigger', 'parent', ), - 'create_worker_pool': ('parent', 'worker_pool', 'worker_pool_id', 'validate_only', ), - 'delete_build_trigger': ('project_id', 'trigger_id', 'name', ), - 'delete_worker_pool': ('name', 'etag', 'allow_missing', 'validate_only', ), - 'get_build': ('project_id', 'id', 'name', ), - 'get_build_trigger': ('project_id', 'trigger_id', 'name', ), - 'get_worker_pool': ('name', ), - 'list_builds': ('project_id', 'parent', 'page_size', 'page_token', 'filter', ), - 'list_build_triggers': ('project_id', 'parent', 'page_size', 'page_token', ), - 'list_worker_pools': ('parent', 'page_size', 'page_token', ), - 'receive_trigger_webhook': ('name', 'body', 'project_id', 'trigger', 'secret', ), - 'retry_build': ('project_id', 'id', 'name', ), - 'run_build_trigger': ('project_id', 'trigger_id', 'name', 'source', ), - 'update_build_trigger': ('project_id', 'trigger_id', 'trigger', ), - 'update_worker_pool': ('worker_pool', 'update_mask', 'validate_only', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = 
original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=cloudbuildCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the cloudbuild client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py deleted file mode 100644 index 49cab02d..00000000 --- a/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-build' - - -description = "Google Cloud Build API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/devtools/cloudbuild/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-build" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud", "google.cloud.devtools"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - 
"Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py deleted file mode 100644 index eb31221d..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ /dev/null @@ -1,10280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api import httpbody_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildAsyncClient -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildClient -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports -from google.cloud.devtools.cloudbuild_v1.types import cloudbuild -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from 
google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CloudBuildClient._get_default_mtls_endpoint(None) is None - assert CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (CloudBuildClient, "grpc"), - (CloudBuildAsyncClient, "grpc_asyncio"), - (CloudBuildClient, "rest"), -]) -def test_cloud_build_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 
'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudbuild.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.CloudBuildGrpcTransport, "grpc"), - (transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.CloudBuildRestTransport, "rest"), -]) -def test_cloud_build_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (CloudBuildClient, "grpc"), - (CloudBuildAsyncClient, "grpc_asyncio"), - (CloudBuildClient, "rest"), -]) -def test_cloud_build_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudbuild.googleapis.com' - ) - - -def 
test_cloud_build_client_get_transport_class(): - transport = CloudBuildClient.get_transport_class() - available_transports = [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildRestTransport, - ] - assert transport in available_transports - - transport = CloudBuildClient.get_transport_class("grpc") - assert transport == transports.CloudBuildGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), -]) -@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) -@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) -def test_cloud_build_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "true"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "false"), - 
(CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "true"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "false"), -]) -@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) -@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_build_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - CloudBuildClient, CloudBuildAsyncClient -]) -@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) -@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) -def test_cloud_build_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), -]) -def test_cloud_build_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest", None), -]) -def test_cloud_build_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_cloud_build_client_client_options_from_dict(): - with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = CloudBuildClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_cloud_build_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateBuildRequest, - dict, -]) -def test_create_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - client.create_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildRequest() - -@pytest.mark.asyncio -async def test_create_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_build_async_from_dict(): - await test_create_build_async(request_type=dict) - -def test_create_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_create_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_build( - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].build - mock_val = cloudbuild.Build(name='name_value') - assert arg == mock_val - - -def test_create_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_build( - cloudbuild.CreateBuildRequest(), - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_build( - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].build - mock_val = cloudbuild.Build(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_build( - cloudbuild.CreateBuildRequest(), - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetBuildRequest, - dict, -]) -def test_get_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - ) - response = client.get_build(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -def test_get_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - client.get_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildRequest() - -@pytest.mark.asyncio -async def test_get_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - )) - response = await client.get_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_get_build_async_from_dict(): - await test_get_build_async(request_type=dict) - -def test_get_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - call.return_value = cloudbuild.Build() - client.get_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_get_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.Build() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - - -def test_get_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_build( - cloudbuild.GetBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - -@pytest.mark.asyncio -async def test_get_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.Build() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_build( - cloudbuild.GetBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListBuildsRequest, - dict, -]) -def test_list_builds(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloudbuild.ListBuildsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_builds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_builds_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - client.list_builds() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildsRequest() - -@pytest.mark.asyncio -async def test_list_builds_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListBuildsRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_builds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_builds_async_from_dict(): - await test_list_builds_async(request_type=dict) - -def test_list_builds_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildsRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - call.return_value = cloudbuild.ListBuildsResponse() - client.list_builds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_list_builds_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_builds( - project_id='project_id_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - - -def test_list_builds_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_builds( - cloudbuild.ListBuildsRequest(), - project_id='project_id_value', - filter='filter_value', - ) - -@pytest.mark.asyncio -async def test_list_builds_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_builds( - project_id='project_id_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_builds_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_builds( - cloudbuild.ListBuildsRequest(), - project_id='project_id_value', - filter='filter_value', - ) - - -def test_list_builds_pager(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_builds(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.Build) - for i in results) -def test_list_builds_pages(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - RuntimeError, - ) - pages = list(client.list_builds(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_builds_async_pager(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_builds(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.Build) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_builds_async_pages(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_builds), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_builds(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CancelBuildRequest, - dict, -]) -def test_cancel_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - ) - response = client.cancel_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CancelBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -def test_cancel_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - client.cancel_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CancelBuildRequest() - -@pytest.mark.asyncio -async def test_cancel_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CancelBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - )) - response = await client.cancel_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CancelBuildRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_cancel_build_async_from_dict(): - await test_cancel_build_async(request_type=dict) - -def test_cancel_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CancelBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - call.return_value = cloudbuild.Build() - client.cancel_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_cancel_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloudbuild.Build() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.cancel_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - - -def test_cancel_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_build( - cloudbuild.CancelBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - -@pytest.mark.asyncio -async def test_cancel_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.Build() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.cancel_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_cancel_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.cancel_build( - cloudbuild.CancelBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.RetryBuildRequest, - dict, -]) -def test_retry_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.retry_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RetryBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_retry_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - client.retry_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RetryBuildRequest() - -@pytest.mark.asyncio -async def test_retry_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.RetryBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.retry_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RetryBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_retry_build_async_from_dict(): - await test_retry_build_async(request_type=dict) - -def test_retry_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudbuild.RetryBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.retry_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_retry_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.retry_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - - -def test_retry_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.retry_build( - cloudbuild.RetryBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - -@pytest.mark.asyncio -async def test_retry_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.retry_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.retry_build( - project_id='project_id_value', - id='id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].id - mock_val = 'id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_retry_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.retry_build( - cloudbuild.RetryBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ApproveBuildRequest, - dict, -]) -def test_approve_build(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.approve_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ApproveBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_approve_build_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - client.approve_build() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ApproveBuildRequest() - -@pytest.mark.asyncio -async def test_approve_build_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ApproveBuildRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.approve_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ApproveBuildRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_approve_build_async_from_dict(): - await test_approve_build_async(request_type=dict) - -def test_approve_build_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ApproveBuildRequest(**{"name": "projects/sample1/locations/sample2/builds/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.approve_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_approve_build_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.approve_build( - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].approval_result - mock_val = cloudbuild.ApprovalResult(approver_account='approver_account_value') - assert arg == mock_val - - -def test_approve_build_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.approve_build( - cloudbuild.ApproveBuildRequest(), - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - -@pytest.mark.asyncio -async def test_approve_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.approve_build), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.approve_build( - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].approval_result - mock_val = cloudbuild.ApprovalResult(approver_account='approver_account_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_approve_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.approve_build( - cloudbuild.ApproveBuildRequest(), - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateBuildTriggerRequest, - dict, -]) -def test_create_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - response = client.create_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_create_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - client.create_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_create_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - )) - response = await client.create_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_create_build_trigger_async_from_dict(): - await test_create_build_trigger_async(request_type=dict) - -def test_create_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildTriggerRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - call.return_value = cloudbuild.BuildTrigger() - client.create_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_create_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_build_trigger( - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger - mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') - assert arg == mock_val - - -def test_create_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_build_trigger( - cloudbuild.CreateBuildTriggerRequest(), - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - -@pytest.mark.asyncio -async def test_create_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_build_trigger( - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger - mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_build_trigger( - cloudbuild.CreateBuildTriggerRequest(), - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetBuildTriggerRequest, - dict, -]) -def test_get_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - response = client.get_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_get_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - client.get_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_get_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - )) - response = await client.get_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_get_build_trigger_async_from_dict(): - await test_get_build_trigger_async(request_type=dict) - -def test_get_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - call.return_value = cloudbuild.BuildTrigger() - client.get_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_get_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - - -def test_get_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_build_trigger( - cloudbuild.GetBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - -@pytest.mark.asyncio -async def test_get_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_build_trigger( - cloudbuild.GetBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListBuildTriggersRequest, - dict, -]) -def test_list_build_triggers(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildTriggersResponse( - next_page_token='next_page_token_value', - ) - response = client.list_build_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_build_triggers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - client.list_build_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildTriggersRequest() - -@pytest.mark.asyncio -async def test_list_build_triggers_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListBuildTriggersRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_build_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListBuildTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_build_triggers_async_from_dict(): - await test_list_build_triggers_async(request_type=dict) - -def test_list_build_triggers_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudbuild.ListBuildTriggersRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - call.return_value = cloudbuild.ListBuildTriggersResponse() - client.list_build_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_list_build_triggers_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildTriggersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_build_triggers( - project_id='project_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - - -def test_list_build_triggers_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), - project_id='project_id_value', - ) - -@pytest.mark.asyncio -async def test_list_build_triggers_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListBuildTriggersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_build_triggers( - project_id='project_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_build_triggers_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), - project_id='project_id_value', - ) - - -def test_list_build_triggers_pager(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_build_triggers(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) - for i in results) -def test_list_build_triggers_pages(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - RuntimeError, - ) - pages = list(client.list_build_triggers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_build_triggers_async_pager(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_build_triggers(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_build_triggers_async_pages(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_build_triggers(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - cloudbuild.DeleteBuildTriggerRequest, - dict, -]) -def test_delete_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - client.delete_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_delete_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.DeleteBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_build_trigger_async_from_dict(): - await test_delete_build_trigger_async(request_type=dict) - -def test_delete_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - call.return_value = None - client.delete_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_delete_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - - -def test_delete_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_build_trigger( - cloudbuild.DeleteBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - -@pytest.mark.asyncio -async def test_delete_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_build_trigger( - cloudbuild.DeleteBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.UpdateBuildTriggerRequest, - dict, -]) -def test_update_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - response = client.update_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateBuildTriggerRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_update_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - client.update_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_update_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.UpdateBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - )) - response = await client.update_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -@pytest.mark.asyncio -async def test_update_build_trigger_async_from_dict(): - await test_update_build_trigger_async(request_type=dict) - -def test_update_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateBuildTriggerRequest(**{"trigger": {"resource_name": "projects/sample1/locations/sample2/triggers/sample3"}}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - call.return_value = cloudbuild.BuildTrigger() - client.update_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_update_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - arg = args[0].trigger - mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') - assert arg == mock_val - - -def test_update_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_build_trigger( - cloudbuild.UpdateBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - -@pytest.mark.asyncio -async def test_update_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.BuildTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - arg = args[0].trigger - mock_val = cloudbuild.BuildTrigger(resource_name='resource_name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_build_trigger( - cloudbuild.UpdateBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.RunBuildTriggerRequest, - dict, -]) -def test_run_build_trigger(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.run_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RunBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_run_build_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - client.run_build_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RunBuildTriggerRequest() - -@pytest.mark.asyncio -async def test_run_build_trigger_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.RunBuildTriggerRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.run_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.RunBuildTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_run_build_trigger_async_from_dict(): - await test_run_build_trigger_async(request_type=dict) - -def test_run_build_trigger_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.RunBuildTriggerRequest(**{"name": "projects/sample1/locations/sample2/triggers/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.run_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_run_build_trigger_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.run_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - arg = args[0].source - mock_val = cloudbuild.RepoSource(project_id='project_id_value') - assert arg == mock_val - - -def test_run_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.run_build_trigger( - cloudbuild.RunBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - -@pytest.mark.asyncio -async def test_run_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.run_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].trigger_id - mock_val = 'trigger_id_value' - assert arg == mock_val - arg = args[0].source - mock_val = cloudbuild.RepoSource(project_id='project_id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_run_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.run_build_trigger( - cloudbuild.RunBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ReceiveTriggerWebhookRequest, - dict, -]) -def test_receive_trigger_webhook(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ReceiveTriggerWebhookResponse( - ) - response = client.receive_trigger_webhook(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) - - -def test_receive_trigger_webhook_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - client.receive_trigger_webhook() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() - -@pytest.mark.asyncio -async def test_receive_trigger_webhook_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ReceiveTriggerWebhookRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ReceiveTriggerWebhookResponse( - )) - response = await client.receive_trigger_webhook(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ReceiveTriggerWebhookRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) - - -@pytest.mark.asyncio -async def test_receive_trigger_webhook_async_from_dict(): - await test_receive_trigger_webhook_async(request_type=dict) - - -def test_receive_trigger_webhook_field_headers(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudbuild.ReceiveTriggerWebhookRequest() - - request.project_id = 'project_id_value' - request.trigger = 'trigger_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - call.return_value = cloudbuild.ReceiveTriggerWebhookResponse() - client.receive_trigger_webhook(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&trigger=trigger_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_receive_trigger_webhook_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ReceiveTriggerWebhookRequest() - - request.project_id = 'project_id_value' - request.trigger = 'trigger_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.receive_trigger_webhook), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ReceiveTriggerWebhookResponse()) - await client.receive_trigger_webhook(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&trigger=trigger_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateWorkerPoolRequest, - dict, -]) -def test_create_worker_pool(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_worker_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - client.create_worker_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateWorkerPoolRequest() - -@pytest.mark.asyncio -async def test_create_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.CreateWorkerPoolRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.CreateWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_worker_pool_async_from_dict(): - await test_create_worker_pool_async(request_type=dict) - -def test_create_worker_pool_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateWorkerPoolRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_create_worker_pool_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_worker_pool( - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].worker_pool - mock_val = cloudbuild.WorkerPool(name='name_value') - assert arg == mock_val - arg = args[0].worker_pool_id - mock_val = 'worker_pool_id_value' - assert arg == mock_val - - -def test_create_worker_pool_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_worker_pool( - cloudbuild.CreateWorkerPoolRequest(), - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - -@pytest.mark.asyncio -async def test_create_worker_pool_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_worker_pool( - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].worker_pool - mock_val = cloudbuild.WorkerPool(name='name_value') - assert arg == mock_val - arg = args[0].worker_pool_id - mock_val = 'worker_pool_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_worker_pool_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_worker_pool( - cloudbuild.CreateWorkerPoolRequest(), - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetWorkerPoolRequest, - dict, -]) -def test_get_worker_pool(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.WorkerPool( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=cloudbuild.WorkerPool.State.CREATING, - etag='etag_value', - ) - response = client.get_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.state == cloudbuild.WorkerPool.State.CREATING - assert response.etag == 'etag_value' - - -def test_get_worker_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - client.get_worker_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetWorkerPoolRequest() - -@pytest.mark.asyncio -async def test_get_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.GetWorkerPoolRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=cloudbuild.WorkerPool.State.CREATING, - etag='etag_value', - )) - response = await client.get_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.GetWorkerPoolRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.state == cloudbuild.WorkerPool.State.CREATING - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_worker_pool_async_from_dict(): - await test_get_worker_pool_async(request_type=dict) - -def test_get_worker_pool_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetWorkerPoolRequest(**{"name": "projects/sample1/locations/sample2/workerPools/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - call.return_value = cloudbuild.WorkerPool() - client.get_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_get_worker_pool_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.WorkerPool() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_worker_pool( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_worker_pool_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_worker_pool( - cloudbuild.GetWorkerPoolRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_worker_pool_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.WorkerPool() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_worker_pool( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_worker_pool_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_worker_pool( - cloudbuild.GetWorkerPoolRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.DeleteWorkerPoolRequest, - dict, -]) -def test_delete_worker_pool(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_worker_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - client.delete_worker_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteWorkerPoolRequest() - -@pytest.mark.asyncio -async def test_delete_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.DeleteWorkerPoolRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.DeleteWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_worker_pool_async_from_dict(): - await test_delete_worker_pool_async(request_type=dict) - -def test_delete_worker_pool_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteWorkerPoolRequest(**{"name": "projects/sample1/locations/sample2/workerPools/sample3"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_delete_worker_pool_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_worker_pool( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_worker_pool_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_worker_pool( - cloudbuild.DeleteWorkerPoolRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_worker_pool_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_worker_pool( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_worker_pool_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_worker_pool( - cloudbuild.DeleteWorkerPoolRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.UpdateWorkerPoolRequest, - dict, -]) -def test_update_worker_pool(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_worker_pool_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - client.update_worker_pool() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateWorkerPoolRequest() - -@pytest.mark.asyncio -async def test_update_worker_pool_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.UpdateWorkerPoolRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.UpdateWorkerPoolRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_worker_pool_async_from_dict(): - await test_update_worker_pool_async(request_type=dict) - -def test_update_worker_pool_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateWorkerPoolRequest(**{"worker_pool": {"name": "projects/sample1/locations/sample2/workerPools/sample3"}}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_update_worker_pool_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_worker_pool( - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].worker_pool - mock_val = cloudbuild.WorkerPool(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_worker_pool_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_worker_pool( - cloudbuild.UpdateWorkerPoolRequest(), - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_worker_pool_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_worker_pool( - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].worker_pool - mock_val = cloudbuild.WorkerPool(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_worker_pool_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_worker_pool( - cloudbuild.UpdateWorkerPoolRequest(), - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListWorkerPoolsRequest, - dict, -]) -def test_list_worker_pools(request_type, transport: str = 'grpc'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListWorkerPoolsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_worker_pools(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListWorkerPoolsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListWorkerPoolsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_worker_pools_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - client.list_worker_pools() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListWorkerPoolsRequest() - -@pytest.mark.asyncio -async def test_list_worker_pools_async(transport: str = 'grpc_asyncio', request_type=cloudbuild.ListWorkerPoolsRequest): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_worker_pools(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudbuild.ListWorkerPoolsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListWorkerPoolsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_worker_pools_async_from_dict(): - await test_list_worker_pools_async(request_type=dict) - -def test_list_worker_pools_routing_parameters(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListWorkerPoolsRequest(**{"parent": "projects/sample1/locations/sample2"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - call.return_value = cloudbuild.ListWorkerPoolsResponse() - client.list_worker_pools(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_list_worker_pools_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListWorkerPoolsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_worker_pools( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_worker_pools_flattened_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_worker_pools( - cloudbuild.ListWorkerPoolsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_worker_pools_flattened_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListWorkerPoolsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_worker_pools( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_worker_pools_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_worker_pools( - cloudbuild.ListWorkerPoolsRequest(), - parent='parent_value', - ) - - -def test_list_worker_pools_pager(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.list_worker_pools(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.WorkerPool) - for i in results) -def test_list_worker_pools_pages(transport_name: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - RuntimeError, - ) - pages = list(client.list_worker_pools(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_worker_pools_async_pager(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_worker_pools(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.WorkerPool) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_worker_pools_async_pages(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_worker_pools(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateBuildRequest, - dict, -]) -def test_create_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request_init["build"] = {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 
1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': 
{}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_build(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_build_rest_required_fields(request_type=cloudbuild.CreateBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("parent", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(("parent", )) & set(("projectId", "build", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
cloudbuild.CreateBuildRequest.pb(cloudbuild.CreateBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.CreateBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request_init["build"] = {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': 
['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 
'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_build(request) - - -def test_create_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds" % client.transport._host, args[1]) - - -def test_create_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_build( - cloudbuild.CreateBuildRequest(), - project_id='project_id_value', - build=cloudbuild.Build(name='name_value'), - ) - - -def test_create_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetBuildRequest, - dict, -]) -def test_get_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_build(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -def test_get_build_rest_required_fields(request_type=cloudbuild.GetBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - 
use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["id"] = 'id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("name", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "id" in jsonified_request - assert jsonified_request["id"] == 'id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.Build() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name", )) & set(("projectId", "id", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.GetBuildRequest.pb(cloudbuild.GetBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = 
PreparedRequest() - req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) - - request = cloudbuild.GetBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.Build() - - client.get_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_build(request) - - -def test_get_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.Build() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - id='id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}" % client.transport._host, args[1]) - - -def test_get_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_build( - cloudbuild.GetBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -def test_get_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListBuildsRequest, - dict, -]) -def test_list_builds_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_builds(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBuildsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_builds_rest_required_fields(request_type=cloudbuild.ListBuildsRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_builds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_builds._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", "parent", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_builds(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_builds_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_builds._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", "parent", )) & set(("projectId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_builds_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_builds") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_builds") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ListBuildsRequest.pb(cloudbuild.ListBuildsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.ListBuildsResponse.to_json(cloudbuild.ListBuildsResponse()) - - request = cloudbuild.ListBuildsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.ListBuildsResponse() - - client.list_builds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_builds_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListBuildsRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_builds(request) - - -def test_list_builds_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.ListBuildsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - filter='filter_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_builds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds" % client.transport._host, args[1]) - - -def test_list_builds_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_builds( - cloudbuild.ListBuildsRequest(), - project_id='project_id_value', - filter='filter_value', - ) - - -def test_list_builds_rest_pager(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloudbuild.ListBuildsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1'} - - pager = client.list_builds(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.Build) - for i in results) - - pages = list(client.list_builds(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CancelBuildRequest, - dict, -]) -def test_cancel_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.Build( - name='name_value', - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.PENDING, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - service_account='service_account_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.cancel_build(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.Build) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.status == cloudbuild.Build.Status.PENDING - assert response.status_detail == 'status_detail_value' - assert response.images == ['images_value'] - assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == 'log_url_value' - assert response.tags == ['tags_value'] - assert response.service_account == 'service_account_value' - - -def test_cancel_build_rest_required_fields(request_type=cloudbuild.CancelBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["id"] = 'id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "id" in jsonified_request - assert jsonified_request["id"] == 'id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.Build() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_cancel_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.cancel_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "id", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_cancel_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_cancel_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.CancelBuildRequest.pb(cloudbuild.CancelBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) - - request = cloudbuild.CancelBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.Build() - - client.cancel_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_cancel_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CancelBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_build(request) - - -def test_cancel_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.Build() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - id='id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.Build.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.cancel_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}:cancel" % client.transport._host, args[1]) - - -def test_cancel_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_build( - cloudbuild.CancelBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -def test_cancel_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.RetryBuildRequest, - dict, -]) -def test_retry_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.retry_build(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_retry_build_rest_required_fields(request_type=cloudbuild.RetryBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).retry_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["id"] = 'id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).retry_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "id" in jsonified_request - assert jsonified_request["id"] == 'id_value' - - client = CloudBuildClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.retry_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_retry_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.retry_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "id", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_retry_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - 
client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_retry_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_retry_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.RetryBuildRequest.pb(cloudbuild.RetryBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.RetryBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.retry_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_retry_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.RetryBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.retry_build(request) - - -def test_retry_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - id='id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.retry_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/builds/{id}:retry" % client.transport._host, args[1]) - - -def test_retry_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.retry_build( - cloudbuild.RetryBuildRequest(), - project_id='project_id_value', - id='id_value', - ) - - -def test_retry_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ApproveBuildRequest, - dict, -]) -def test_approve_build_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/builds/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.approve_build(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_approve_build_rest_required_fields(request_type=cloudbuild.ApproveBuildRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).approve_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).approve_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.approve_build(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_approve_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.approve_build._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_approve_build_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_approve_build") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_approve_build") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ApproveBuildRequest.pb(cloudbuild.ApproveBuildRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.ApproveBuildRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.approve_build(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_approve_build_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ApproveBuildRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/builds/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.approve_build(request) - - -def test_approve_build_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/builds/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.approve_build(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/builds/*}:approve" % client.transport._host, args[1]) - - -def test_approve_build_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.approve_build( - cloudbuild.ApproveBuildRequest(), - name='name_value', - approval_result=cloudbuild.ApprovalResult(approver_account='approver_account_value'), - ) - - -def test_approve_build_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateBuildTriggerRequest, - dict, -]) -def test_create_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': 
{'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 
'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_build_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_create_build_trigger_rest_required_fields(request_type=cloudbuild.CreateBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped 
- - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_build_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("parent", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("parent", )) & set(("projectId", "trigger", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_build_trigger") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_build_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.CreateBuildTriggerRequest.pb(cloudbuild.CreateBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) - - request = cloudbuild.CreateBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.BuildTrigger() - - client.create_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 
'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 
'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} - request 
= request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_build_trigger(request) - - -def test_create_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1]) - - -def test_create_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_build_trigger( - cloudbuild.CreateBuildTriggerRequest(), - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - -def test_create_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetBuildTriggerRequest, - dict, -]) -def test_get_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_build_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_get_build_trigger_rest_required_fields(request_type=cloudbuild.GetBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["trigger_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["triggerId"] = 'trigger_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_build_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("name", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_build_trigger") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_build_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.GetBuildTriggerRequest.pb(cloudbuild.GetBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) - - request = cloudbuild.GetBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.BuildTrigger() - - client.get_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_build_trigger(request) - - -def test_get_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.BuildTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) - - -def test_get_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_build_trigger( - cloudbuild.GetBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - -def test_get_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListBuildTriggersRequest, - dict, -]) -def test_list_build_triggers_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildTriggersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_build_triggers(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBuildTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_build_triggers_rest_required_fields(request_type=cloudbuild.ListBuildTriggersRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_build_triggers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_build_triggers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "parent", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildTriggersResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_build_triggers(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_build_triggers_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_build_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", "parent", )) & set(("projectId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_build_triggers_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_build_triggers") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_build_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ListBuildTriggersRequest.pb(cloudbuild.ListBuildTriggersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.ListBuildTriggersResponse.to_json(cloudbuild.ListBuildTriggersResponse()) - - request = cloudbuild.ListBuildTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.ListBuildTriggersResponse() - - client.list_build_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_build_triggers_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListBuildTriggersRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_build_triggers(request) - - -def test_list_build_triggers_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListBuildTriggersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_build_triggers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1]) - - -def test_list_build_triggers_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), - project_id='project_id_value', - ) - - -def test_list_build_triggers_rest_pager(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloudbuild.ListBuildTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1'} - - pager = client.list_build_triggers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) - for i in results) - - pages = list(client.list_build_triggers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token 
- - -@pytest.mark.parametrize("request_type", [ - cloudbuild.DeleteBuildTriggerRequest, - dict, -]) -def test_delete_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_build_trigger(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_build_trigger_rest_required_fields(request_type=cloudbuild.DeleteBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["trigger_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["triggerId"] = 'trigger_id_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_build_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("name", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_delete_build_trigger") as pre: - pre.assert_not_called() - pb_message = cloudbuild.DeleteBuildTriggerRequest.pb(cloudbuild.DeleteBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = cloudbuild.DeleteBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", 
"squid"), - ] - pre.return_value = request, metadata - - client.delete_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.DeleteBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_build_trigger(request) - - -def test_delete_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) - - -def test_delete_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_build_trigger( - cloudbuild.DeleteBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - ) - - -def test_delete_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.UpdateBuildTriggerRequest, - dict, -]) -def test_update_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': 
{'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 
'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 'pull_request': {}, 'push': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger( - resource_name='resource_name_value', - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - filter='filter_value', - service_account='service_account_value', - autodetect=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_build_trigger(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudbuild.BuildTrigger) - assert response.resource_name == 'resource_name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.name == 'name_value' - assert response.tags == ['tags_value'] - assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ['included_files_value'] - assert response.filter == 'filter_value' - assert response.service_account == 'service_account_value' - - -def test_update_build_trigger_rest_required_fields(request_type=cloudbuild.UpdateBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["trigger_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify 
fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["triggerId"] = 'trigger_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "triggerId", "trigger", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_update_build_trigger") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_update_build_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.UpdateBuildTriggerRequest.pb(cloudbuild.UpdateBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.BuildTrigger.to_json(cloudbuild.BuildTrigger()) - - request = cloudbuild.UpdateBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.BuildTrigger() - - client.update_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.UpdateBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request_init["trigger"] = {'resource_name': 'resource_name_value', 'id': 'id_value', 'description': 'description_value', 'name': 'name_value', 'tags': ['tags_value1', 'tags_value2'], 'trigger_template': {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}}, 'github': {'installation_id': 1598, 'owner': 'owner_value', 'name': 'name_value', 'pull_request': {'branch': 'branch_value', 'comment_control': 1, 'invert_regex': True}, 'push': {'branch': 'branch_value', 'tag': 'tag_value', 'invert_regex': True}}, 'pubsub_config': {'subscription': 'subscription_value', 'topic': 'topic_value', 'service_account_email': 'service_account_email_value', 'state': 1}, 'webhook_config': {'secret': 'secret_value', 'state': 1}, 'autodetect': True, 'build': {'name': 'name_value', 'id': 'id_value', 'project_id': 'project_id_value', 'status': 10, 'status_detail': 
'status_detail_value', 'source': {'storage_source': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}, 'repo_source': {}, 'git_source': {'url': 'url_value', 'dir_': 'dir__value', 'revision': 'revision_value'}, 'storage_source_manifest': {'bucket': 'bucket_value', 'object_': 'object__value', 'generation': 1068}}, 'steps': [{'name': 'name_value', 'env': ['env_value1', 'env_value2'], 'args': ['args_value1', 'args_value2'], 'dir_': 'dir__value', 'id': 'id_value', 'wait_for': ['wait_for_value1', 'wait_for_value2'], 'entrypoint': 'entrypoint_value', 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': [{'name': 'name_value', 'path': 'path_value'}], 'timing': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'pull_timing': {}, 'timeout': {'seconds': 751, 'nanos': 543}, 'status': 10, 'allow_failure': True, 'exit_code': 948, 'allow_exit_codes': [1702, 1703], 'script': 'script_value'}], 'results': {'images': [{'name': 'name_value', 'digest': 'digest_value', 'push_timing': {}}], 'build_step_images': ['build_step_images_value1', 'build_step_images_value2'], 'artifact_manifest': 'artifact_manifest_value', 'num_artifacts': 1392, 'build_step_outputs': [b'build_step_outputs_blob1', b'build_step_outputs_blob2'], 'artifact_timing': {}, 'python_packages': [{'uri': 'uri_value', 'file_hashes': {'file_hash': [{'type_': 1, 'value': b'value_blob'}]}, 'push_timing': {}}], 'maven_artifacts': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}], 'npm_packages': [{'uri': 'uri_value', 'file_hashes': {}, 'push_timing': {}}]}, 'create_time': {}, 'start_time': {}, 'finish_time': {}, 'timeout': {}, 'images': ['images_value1', 'images_value2'], 'queue_ttl': {}, 'artifacts': {'images': ['images_value1', 'images_value2'], 'objects': {'location': 'location_value', 'paths': ['paths_value1', 'paths_value2'], 'timing': {}}, 'maven_artifacts': [{'repository': 'repository_value', 'path': 'path_value', 'artifact_id': 'artifact_id_value', 
'group_id': 'group_id_value', 'version': 'version_value'}], 'python_packages': [{'repository': 'repository_value', 'paths': ['paths_value1', 'paths_value2']}], 'npm_packages': [{'repository': 'repository_value', 'package_path': 'package_path_value'}]}, 'logs_bucket': 'logs_bucket_value', 'source_provenance': {'resolved_storage_source': {}, 'resolved_repo_source': {}, 'resolved_storage_source_manifest': {}, 'file_hashes': {}}, 'build_trigger_id': 'build_trigger_id_value', 'options': {'source_provenance_hash': [1], 'requested_verify_option': 1, 'machine_type': 1, 'disk_size_gb': 1261, 'substitution_option': 1, 'dynamic_substitutions': True, 'log_streaming_option': 1, 'worker_pool': 'worker_pool_value', 'pool': {'name': 'name_value'}, 'logging': 1, 'env': ['env_value1', 'env_value2'], 'secret_env': ['secret_env_value1', 'secret_env_value2'], 'volumes': {}, 'default_logs_bucket_behavior': 1}, 'log_url': 'log_url_value', 'substitutions': {}, 'tags': ['tags_value1', 'tags_value2'], 'secrets': [{'kms_key_name': 'kms_key_name_value', 'secret_env': {}}], 'timing': {}, 'approval': {'state': 1, 'config': {'approval_required': True}, 'result': {'approver_account': 'approver_account_value', 'approval_time': {}, 'decision': 1, 'comment': 'comment_value', 'url': 'url_value'}}, 'service_account': 'service_account_value', 'available_secrets': {'secret_manager': [{'version_name': 'version_name_value', 'env': 'env_value'}], 'inline': [{'kms_key_name': 'kms_key_name_value', 'env_map': {}}]}, 'warnings': [{'text': 'text_value', 'priority': 1}], 'failure_info': {'type_': 1, 'detail': 'detail_value'}}, 'filename': 'filename_value', 'create_time': {}, 'disabled': True, 'substitutions': {}, 'ignored_files': ['ignored_files_value1', 'ignored_files_value2'], 'included_files': ['included_files_value1', 'included_files_value2'], 'filter': 'filter_value', 'service_account': 'service_account_value', 'repository_event_config': {'repository': 'repository_value', 'repository_type': 1, 
'pull_request': {}, 'push': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_build_trigger(request) - - -def test_update_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.BuildTrigger() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.BuildTrigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}" % client.transport._host, args[1]) - - -def test_update_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_build_trigger( - cloudbuild.UpdateBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(resource_name='resource_name_value'), - ) - - -def test_update_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.RunBuildTriggerRequest, - dict, -]) -def test_run_build_trigger_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request_init["source"] = {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.run_build_trigger(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_run_build_trigger_rest_required_fields(request_type=cloudbuild.RunBuildTriggerRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["trigger_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_build_trigger._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["triggerId"] = 'trigger_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_build_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("name", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.run_build_trigger(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_run_build_trigger_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.run_build_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name", )) & set(("projectId", "triggerId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_build_trigger_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_run_build_trigger") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_run_build_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.RunBuildTriggerRequest.pb(cloudbuild.RunBuildTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.RunBuildTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.run_build_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_run_build_trigger_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.RunBuildTriggerRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger_id': 'sample2'} - request_init["source"] = {'project_id': 'project_id_value', 'repo_name': 'repo_name_value', 'branch_name': 'branch_name_value', 'tag_name': 'tag_name_value', 'commit_sha': 'commit_sha_value', 'dir_': 'dir__value', 'invert_regex': True, 'substitutions': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_build_trigger(request) - - -def test_run_build_trigger_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'trigger_id': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.run_build_trigger(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/triggers/{trigger_id}:run" % client.transport._host, args[1]) - - -def test_run_build_trigger_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.run_build_trigger( - cloudbuild.RunBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), - ) - - -def test_run_build_trigger_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ReceiveTriggerWebhookRequest, - dict, -]) -def test_receive_trigger_webhook_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'trigger': 'sample2'} - request_init["body"] = {'content_type': 'content_type_value', 'data': b'data_blob', 'extensions': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ReceiveTriggerWebhookResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ReceiveTriggerWebhookResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.receive_trigger_webhook(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_receive_trigger_webhook_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_receive_trigger_webhook") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_receive_trigger_webhook") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ReceiveTriggerWebhookRequest.pb(cloudbuild.ReceiveTriggerWebhookRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.ReceiveTriggerWebhookResponse.to_json(cloudbuild.ReceiveTriggerWebhookResponse()) - - request = cloudbuild.ReceiveTriggerWebhookRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.ReceiveTriggerWebhookResponse() - - client.receive_trigger_webhook(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_receive_trigger_webhook_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ReceiveTriggerWebhookRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = 
{'project_id': 'sample1', 'trigger': 'sample2'} - request_init["body"] = {'content_type': 'content_type_value', 'data': b'data_blob', 'extensions': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.receive_trigger_webhook(request) - - -def test_receive_trigger_webhook_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.CreateWorkerPoolRequest, - dict, -]) -def test_create_worker_pool_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["worker_pool"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_worker_pool(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_create_worker_pool_rest_required_fields(request_type=cloudbuild.CreateWorkerPoolRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["worker_pool_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "workerPoolId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "workerPoolId" in jsonified_request - assert jsonified_request["workerPoolId"] == request_init["worker_pool_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["workerPoolId"] = 'worker_pool_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_worker_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("validate_only", "worker_pool_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "workerPoolId" in jsonified_request - assert jsonified_request["workerPoolId"] == 'worker_pool_id_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_worker_pool(request) - - expected_params = [ - ( - "workerPoolId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_worker_pool_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_worker_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(("validateOnly", "workerPoolId", )) & set(("parent", "workerPool", "workerPoolId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_worker_pool_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_create_worker_pool") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_create_worker_pool") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.CreateWorkerPoolRequest.pb(cloudbuild.CreateWorkerPoolRequest()) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.CreateWorkerPoolRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.CreateWorkerPoolRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["worker_pool"] = {'name': 'name_value', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_worker_pool(request) - - -def test_create_worker_pool_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_worker_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workerPools" % client.transport._host, args[1]) - - -def test_create_worker_pool_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_worker_pool( - cloudbuild.CreateWorkerPoolRequest(), - parent='parent_value', - worker_pool=cloudbuild.WorkerPool(name='name_value'), - worker_pool_id='worker_pool_id_value', - ) - - -def test_create_worker_pool_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.GetWorkerPoolRequest, - dict, -]) -def test_get_worker_pool_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloudbuild.WorkerPool( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=cloudbuild.WorkerPool.State.CREATING, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.WorkerPool.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_worker_pool(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.state == cloudbuild.WorkerPool.State.CREATING - assert response.etag == 'etag_value' - - -def test_get_worker_pool_rest_required_fields(request_type=cloudbuild.GetWorkerPoolRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.WorkerPool() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.WorkerPool.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_worker_pool(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_worker_pool_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_worker_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_worker_pool_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_get_worker_pool") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_get_worker_pool") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.GetWorkerPoolRequest.pb(cloudbuild.GetWorkerPoolRequest()) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.WorkerPool.to_json(cloudbuild.WorkerPool()) - - request = cloudbuild.GetWorkerPoolRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.WorkerPool() - - client.get_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.GetWorkerPoolRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_worker_pool(request) - - -def test_get_worker_pool_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.WorkerPool() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.WorkerPool.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_worker_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) - - -def test_get_worker_pool_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_worker_pool( - cloudbuild.GetWorkerPoolRequest(), - name='name_value', - ) - - -def test_get_worker_pool_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.DeleteWorkerPoolRequest, - dict, -]) -def test_delete_worker_pool_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_worker_pool(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_delete_worker_pool_rest_required_fields(request_type=cloudbuild.DeleteWorkerPoolRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_worker_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_worker_pool(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_worker_pool_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_worker_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_worker_pool_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_delete_worker_pool") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_delete_worker_pool") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
cloudbuild.DeleteWorkerPoolRequest.pb(cloudbuild.DeleteWorkerPoolRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.DeleteWorkerPoolRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.DeleteWorkerPoolRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_worker_pool(request) - - -def test_delete_worker_pool_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/workerPools/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_worker_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) - - -def test_delete_worker_pool_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_worker_pool( - cloudbuild.DeleteWorkerPoolRequest(), - name='name_value', - ) - - -def test_delete_worker_pool_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.UpdateWorkerPoolRequest, - dict, -]) -def test_update_worker_pool_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} - request_init["worker_pool"] = {'name': 'projects/sample1/locations/sample2/workerPools/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_worker_pool(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_worker_pool_rest_required_fields(request_type=cloudbuild.UpdateWorkerPoolRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_worker_pool._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_worker_pool._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_worker_pool(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_worker_pool_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_worker_pool._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("workerPool", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_worker_pool_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_update_worker_pool") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_update_worker_pool") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.UpdateWorkerPoolRequest.pb(cloudbuild.UpdateWorkerPoolRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloudbuild.UpdateWorkerPoolRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_worker_pool(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_worker_pool_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.UpdateWorkerPoolRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} - request_init["worker_pool"] = {'name': 'projects/sample1/locations/sample2/workerPools/sample3', 'display_name': 'display_name_value', 'uid': 'uid_value', 'annotations': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'state': 1, 'private_pool_v1_config': {'worker_config': {'machine_type': 'machine_type_value', 'disk_size_gb': 1261}, 'network_config': {'peered_network': 'peered_network_value', 'egress_option': 1, 'peered_network_ip_range': 'peered_network_ip_range_value'}}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_worker_pool(request) - - -def test_update_worker_pool_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'worker_pool': {'name': 'projects/sample1/locations/sample2/workerPools/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_worker_pool(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}" % client.transport._host, args[1]) - - -def test_update_worker_pool_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_worker_pool( - cloudbuild.UpdateWorkerPoolRequest(), - worker_pool=cloudbuild.WorkerPool(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_worker_pool_rest_error(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloudbuild.ListWorkerPoolsRequest, - dict, -]) -def test_list_worker_pools_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.ListWorkerPoolsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_worker_pools(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListWorkerPoolsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_worker_pools_rest_required_fields(request_type=cloudbuild.ListWorkerPoolsRequest): - transport_class = transports.CloudBuildRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_worker_pools._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_worker_pools._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloudbuild.ListWorkerPoolsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_worker_pools(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_worker_pools_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_worker_pools._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_worker_pools_rest_interceptors(null_interceptor): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudBuildRestInterceptor(), - ) - client = CloudBuildClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "post_list_worker_pools") as post, \ - mock.patch.object(transports.CloudBuildRestInterceptor, "pre_list_worker_pools") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloudbuild.ListWorkerPoolsRequest.pb(cloudbuild.ListWorkerPoolsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloudbuild.ListWorkerPoolsResponse.to_json(cloudbuild.ListWorkerPoolsResponse()) - - request = cloudbuild.ListWorkerPoolsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloudbuild.ListWorkerPoolsResponse() - - client.list_worker_pools(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_worker_pools_rest_bad_request(transport: str = 'rest', request_type=cloudbuild.ListWorkerPoolsRequest): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_worker_pools(request) - - -def test_list_worker_pools_rest_flattened(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloudbuild.ListWorkerPoolsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_worker_pools(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workerPools" % client.transport._host, args[1]) - - -def test_list_worker_pools_rest_flattened_error(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_worker_pools( - cloudbuild.ListWorkerPoolsRequest(), - parent='parent_value', - ) - - -def test_list_worker_pools_rest_pager(transport: str = 'rest'): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - next_page_token='abc', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[], - next_page_token='def', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - ], - next_page_token='ghi', - ), - cloudbuild.ListWorkerPoolsResponse( - worker_pools=[ - cloudbuild.WorkerPool(), - cloudbuild.WorkerPool(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(cloudbuild.ListWorkerPoolsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_worker_pools(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloudbuild.WorkerPool) - for i in results) - - pages = list(client.list_worker_pools(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudBuildClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CloudBuildGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - transports.CloudBuildRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = CloudBuildClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudBuildGrpcTransport, - ) - -def test_cloud_build_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudBuildTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_cloud_build_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CloudBuildTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_build', - 'get_build', - 'list_builds', - 'cancel_build', - 'retry_build', - 'approve_build', - 'create_build_trigger', - 'get_build_trigger', - 'list_build_triggers', - 'delete_build_trigger', - 'update_build_trigger', - 'run_build_trigger', - 'receive_trigger_webhook', - 'create_worker_pool', - 'get_worker_pool', - 'delete_worker_pool', - 'update_worker_pool', - 'list_worker_pools', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_cloud_build_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudBuildTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 
'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_cloud_build_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudBuildTransport() - adc.assert_called_once() - - -def test_cloud_build_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudBuildClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - ], -) -def test_cloud_build_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - transports.CloudBuildRestTransport, - ], -) -def test_cloud_build_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudBuildGrpcTransport, grpc_helpers), - (transports.CloudBuildGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cloud_build_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) -def test_cloud_build_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_cloud_build_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.CloudBuildRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_cloud_build_rest_lro_client(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_cloud_build_host_no_port(transport_name): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudbuild.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_cloud_build_host_with_port(transport_name): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudbuild.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudbuild.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_cloud_build_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = CloudBuildClient( - credentials=creds1, - transport=transport_name, - ) - client2 = CloudBuildClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_build._session - session2 = client2.transport.create_build._session - assert session1 != session2 - session1 = client1.transport.get_build._session - session2 = client2.transport.get_build._session - assert session1 != session2 - session1 = client1.transport.list_builds._session - session2 = client2.transport.list_builds._session - assert session1 != session2 - session1 = client1.transport.cancel_build._session - session2 = 
client2.transport.cancel_build._session - assert session1 != session2 - session1 = client1.transport.retry_build._session - session2 = client2.transport.retry_build._session - assert session1 != session2 - session1 = client1.transport.approve_build._session - session2 = client2.transport.approve_build._session - assert session1 != session2 - session1 = client1.transport.create_build_trigger._session - session2 = client2.transport.create_build_trigger._session - assert session1 != session2 - session1 = client1.transport.get_build_trigger._session - session2 = client2.transport.get_build_trigger._session - assert session1 != session2 - session1 = client1.transport.list_build_triggers._session - session2 = client2.transport.list_build_triggers._session - assert session1 != session2 - session1 = client1.transport.delete_build_trigger._session - session2 = client2.transport.delete_build_trigger._session - assert session1 != session2 - session1 = client1.transport.update_build_trigger._session - session2 = client2.transport.update_build_trigger._session - assert session1 != session2 - session1 = client1.transport.run_build_trigger._session - session2 = client2.transport.run_build_trigger._session - assert session1 != session2 - session1 = client1.transport.receive_trigger_webhook._session - session2 = client2.transport.receive_trigger_webhook._session - assert session1 != session2 - session1 = client1.transport.create_worker_pool._session - session2 = client2.transport.create_worker_pool._session - assert session1 != session2 - session1 = client1.transport.get_worker_pool._session - session2 = client2.transport.get_worker_pool._session - assert session1 != session2 - session1 = client1.transport.delete_worker_pool._session - session2 = client2.transport.delete_worker_pool._session - assert session1 != session2 - session1 = client1.transport.update_worker_pool._session - session2 = client2.transport.update_worker_pool._session - assert session1 != session2 - session1 = 
client1.transport.list_worker_pools._session - session2 = client2.transport.list_worker_pools._session - assert session1 != session2 -def test_cloud_build_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_cloud_build_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudBuildGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) -def test_cloud_build_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport]) -def test_cloud_build_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cloud_build_grpc_lro_client(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_cloud_build_grpc_lro_async_client(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_build_path(): - project = "squid" - build = "clam" - expected = "projects/{project}/builds/{build}".format(project=project, build=build, ) - actual = CloudBuildClient.build_path(project, build) - assert expected == actual - - -def test_parse_build_path(): - expected = { - "project": "whelk", - "build": "octopus", - } - path = CloudBuildClient.build_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_build_path(path) - assert expected == actual - -def test_build_trigger_path(): - project = "oyster" - trigger = "nudibranch" - expected = "projects/{project}/triggers/{trigger}".format(project=project, trigger=trigger, ) - actual = CloudBuildClient.build_trigger_path(project, trigger) - assert expected == actual - - -def test_parse_build_trigger_path(): - expected = { - "project": "cuttlefish", - "trigger": "mussel", - } - path = CloudBuildClient.build_trigger_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_build_trigger_path(path) - assert expected == actual - -def test_crypto_key_path(): - project = "winkle" - location = "nautilus" - keyring = "scallop" - key = "abalone" - expected = "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format(project=project, location=location, keyring=keyring, key=key, ) - actual = CloudBuildClient.crypto_key_path(project, location, keyring, key) - assert expected == actual - - -def test_parse_crypto_key_path(): - expected = { - "project": "squid", - "location": "clam", - "keyring": "whelk", - "key": "octopus", - } - path = CloudBuildClient.crypto_key_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_crypto_key_path(path) - assert expected == actual - -def test_network_path(): - project = "oyster" - network = "nudibranch" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) - actual = CloudBuildClient.network_path(project, network) - assert expected == actual - - -def test_parse_network_path(): - expected = { - "project": "cuttlefish", - "network": "mussel", - } - path = CloudBuildClient.network_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_network_path(path) - assert expected == actual - -def test_repository_path(): - project = "winkle" - location = "nautilus" - connection = "scallop" - repository = "abalone" - expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) - actual = CloudBuildClient.repository_path(project, location, connection, repository) - assert expected == actual - - -def test_parse_repository_path(): - expected = { - "project": "squid", - "location": "clam", - "connection": "whelk", - "repository": "octopus", - } - path = CloudBuildClient.repository_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_repository_path(path) - assert expected == actual - -def test_secret_version_path(): - project = "oyster" - secret = "nudibranch" - version = "cuttlefish" - expected = "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) - actual = CloudBuildClient.secret_version_path(project, secret, version) - assert expected == actual - - -def test_parse_secret_version_path(): - expected = { - "project": "mussel", - "secret": "winkle", - "version": "nautilus", - } - path = CloudBuildClient.secret_version_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_secret_version_path(path) - assert expected == actual - -def test_service_account_path(): - project = "scallop" - service_account = "abalone" - expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) - actual = CloudBuildClient.service_account_path(project, service_account) - assert expected == actual - - -def test_parse_service_account_path(): - expected = { - "project": "squid", - "service_account": "clam", - } - path = CloudBuildClient.service_account_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_service_account_path(path) - assert expected == actual - -def test_subscription_path(): - project = "whelk" - subscription = "octopus" - expected = "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) - actual = CloudBuildClient.subscription_path(project, subscription) - assert expected == actual - - -def test_parse_subscription_path(): - expected = { - "project": "oyster", - "subscription": "nudibranch", - } - path = CloudBuildClient.subscription_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_subscription_path(path) - assert expected == actual - -def test_topic_path(): - project = "cuttlefish" - topic = "mussel" - expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) - actual = CloudBuildClient.topic_path(project, topic) - assert expected == actual - - -def test_parse_topic_path(): - expected = { - "project": "winkle", - "topic": "nautilus", - } - path = CloudBuildClient.topic_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_topic_path(path) - assert expected == actual - -def test_worker_pool_path(): - project = "scallop" - location = "abalone" - worker_pool = "squid" - expected = "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(project=project, location=location, worker_pool=worker_pool, ) - actual = CloudBuildClient.worker_pool_path(project, location, worker_pool) - assert expected == actual - - -def test_parse_worker_pool_path(): - expected = { - "project": "clam", - "location": "whelk", - "worker_pool": "octopus", - } - path = CloudBuildClient.worker_pool_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_worker_pool_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CloudBuildClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = CloudBuildClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = CloudBuildClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = CloudBuildClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CloudBuildClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = CloudBuildClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = CloudBuildClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = CloudBuildClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CloudBuildClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = CloudBuildClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.CloudBuildTransport, '_prep_wrapped_messages') as prep: - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.CloudBuildTransport, '_prep_wrapped_messages') as prep: - transport_class = CloudBuildClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with 
mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc deleted file mode 100644 index a0cf72db..00000000 --- a/owl-bot-staging/v2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/devtools/cloudbuild/__init__.py - google/cloud/devtools/cloudbuild/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git 
a/owl-bot-staging/v2/.flake8 b/owl-bot-staging/v2/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in deleted file mode 100644 index 6f731ec0..00000000 --- a/owl-bot-staging/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/devtools/cloudbuild *.py -recursive-include google/cloud/devtools/cloudbuild_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst deleted file mode 100644 index c788a1b3..00000000 --- a/owl-bot-staging/v2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Devtools Cloudbuild API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Devtools Cloudbuild API. -4. `Setup Authentication.`_ - -.. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst deleted file mode 100644 index f4d9c5e2..00000000 --- a/owl-bot-staging/v2/docs/cloudbuild_v2/repository_manager.rst +++ /dev/null @@ -1,10 +0,0 @@ -RepositoryManager ------------------------------------ - -.. automodule:: google.cloud.devtools.cloudbuild_v2.services.repository_manager - :members: - :inherited-members: - -.. automodule:: google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst deleted file mode 100644 index c055be1a..00000000 --- a/owl-bot-staging/v2/docs/cloudbuild_v2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Devtools Cloudbuild v2 API -==================================================== -.. 
toctree:: - :maxdepth: 2 - - repository_manager diff --git a/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst b/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst deleted file mode 100644 index 2148aa78..00000000 --- a/owl-bot-staging/v2/docs/cloudbuild_v2/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Devtools Cloudbuild v2 API -================================================= - -.. automodule:: google.cloud.devtools.cloudbuild_v2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py deleted file mode 100644 index 4bd8e2dd..00000000 --- a/owl-bot-staging/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-build documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-build" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. 
-# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Cloud Devtools Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-build-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. 
- # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-build.tex", - u"google-cloud-build Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-build", - u"Google Cloud Devtools Cloudbuild Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-build", - u"google-cloud-build Documentation", - author, - "google-cloud-build", - "GAPIC library for Google Cloud Devtools Cloudbuild API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst deleted file mode 100644 index 476758ee..00000000 --- a/owl-bot-staging/v2/docs/index.rst +++ 
/dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - cloudbuild_v2/services - cloudbuild_v2/types diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py deleted file mode 100644 index 47a5d13c..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/__init__.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.devtools.cloudbuild import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.devtools.cloudbuild_v2.services.repository_manager.client import RepositoryManagerClient -from google.cloud.devtools.cloudbuild_v2.services.repository_manager.async_client import RepositoryManagerAsyncClient - -from google.cloud.devtools.cloudbuild_v2.types.cloudbuild import OperationMetadata -from google.cloud.devtools.cloudbuild_v2.types.cloudbuild import RunWorkflowCustomOperationMetadata -from google.cloud.devtools.cloudbuild_v2.types.repositories import BatchCreateRepositoriesRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import BatchCreateRepositoriesResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import Connection -from google.cloud.devtools.cloudbuild_v2.types.repositories import CreateConnectionRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import CreateRepositoryRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import DeleteConnectionRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import DeleteRepositoryRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchGitRefsRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchGitRefsResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchLinkableRepositoriesRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchLinkableRepositoriesResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadTokenRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadTokenResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadWriteTokenRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import FetchReadWriteTokenResponse -from 
google.cloud.devtools.cloudbuild_v2.types.repositories import GetConnectionRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import GetRepositoryRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import GitHubConfig -from google.cloud.devtools.cloudbuild_v2.types.repositories import GitHubEnterpriseConfig -from google.cloud.devtools.cloudbuild_v2.types.repositories import GitLabConfig -from google.cloud.devtools.cloudbuild_v2.types.repositories import InstallationState -from google.cloud.devtools.cloudbuild_v2.types.repositories import ListConnectionsRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import ListConnectionsResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import ListRepositoriesRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import ListRepositoriesResponse -from google.cloud.devtools.cloudbuild_v2.types.repositories import OAuthCredential -from google.cloud.devtools.cloudbuild_v2.types.repositories import ProcessWebhookRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import Repository -from google.cloud.devtools.cloudbuild_v2.types.repositories import ServiceDirectoryConfig -from google.cloud.devtools.cloudbuild_v2.types.repositories import UpdateConnectionRequest -from google.cloud.devtools.cloudbuild_v2.types.repositories import UserCredential - -__all__ = ('RepositoryManagerClient', - 'RepositoryManagerAsyncClient', - 'OperationMetadata', - 'RunWorkflowCustomOperationMetadata', - 'BatchCreateRepositoriesRequest', - 'BatchCreateRepositoriesResponse', - 'Connection', - 'CreateConnectionRequest', - 'CreateRepositoryRequest', - 'DeleteConnectionRequest', - 'DeleteRepositoryRequest', - 'FetchGitRefsRequest', - 'FetchGitRefsResponse', - 'FetchLinkableRepositoriesRequest', - 'FetchLinkableRepositoriesResponse', - 'FetchReadTokenRequest', - 'FetchReadTokenResponse', - 'FetchReadWriteTokenRequest', - 'FetchReadWriteTokenResponse', - 
'GetConnectionRequest', - 'GetRepositoryRequest', - 'GitHubConfig', - 'GitHubEnterpriseConfig', - 'GitLabConfig', - 'InstallationState', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'ListRepositoriesRequest', - 'ListRepositoriesResponse', - 'OAuthCredential', - 'ProcessWebhookRequest', - 'Repository', - 'ServiceDirectoryConfig', - 'UpdateConnectionRequest', - 'UserCredential', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed deleted file mode 100644 index 6070c14c..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-build package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py deleted file mode 100644 index 6745dc72..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/__init__.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.repository_manager import RepositoryManagerClient -from .services.repository_manager import RepositoryManagerAsyncClient - -from .types.cloudbuild import OperationMetadata -from .types.cloudbuild import RunWorkflowCustomOperationMetadata -from .types.repositories import BatchCreateRepositoriesRequest -from .types.repositories import BatchCreateRepositoriesResponse -from .types.repositories import Connection -from .types.repositories import CreateConnectionRequest -from .types.repositories import CreateRepositoryRequest -from .types.repositories import DeleteConnectionRequest -from .types.repositories import DeleteRepositoryRequest -from .types.repositories import FetchGitRefsRequest -from .types.repositories import FetchGitRefsResponse -from .types.repositories import FetchLinkableRepositoriesRequest -from .types.repositories import FetchLinkableRepositoriesResponse -from .types.repositories import FetchReadTokenRequest 
-from .types.repositories import FetchReadTokenResponse -from .types.repositories import FetchReadWriteTokenRequest -from .types.repositories import FetchReadWriteTokenResponse -from .types.repositories import GetConnectionRequest -from .types.repositories import GetRepositoryRequest -from .types.repositories import GitHubConfig -from .types.repositories import GitHubEnterpriseConfig -from .types.repositories import GitLabConfig -from .types.repositories import InstallationState -from .types.repositories import ListConnectionsRequest -from .types.repositories import ListConnectionsResponse -from .types.repositories import ListRepositoriesRequest -from .types.repositories import ListRepositoriesResponse -from .types.repositories import OAuthCredential -from .types.repositories import ProcessWebhookRequest -from .types.repositories import Repository -from .types.repositories import ServiceDirectoryConfig -from .types.repositories import UpdateConnectionRequest -from .types.repositories import UserCredential - -__all__ = ( - 'RepositoryManagerAsyncClient', -'BatchCreateRepositoriesRequest', -'BatchCreateRepositoriesResponse', -'Connection', -'CreateConnectionRequest', -'CreateRepositoryRequest', -'DeleteConnectionRequest', -'DeleteRepositoryRequest', -'FetchGitRefsRequest', -'FetchGitRefsResponse', -'FetchLinkableRepositoriesRequest', -'FetchLinkableRepositoriesResponse', -'FetchReadTokenRequest', -'FetchReadTokenResponse', -'FetchReadWriteTokenRequest', -'FetchReadWriteTokenResponse', -'GetConnectionRequest', -'GetRepositoryRequest', -'GitHubConfig', -'GitHubEnterpriseConfig', -'GitLabConfig', -'InstallationState', -'ListConnectionsRequest', -'ListConnectionsResponse', -'ListRepositoriesRequest', -'ListRepositoriesResponse', -'OAuthCredential', -'OperationMetadata', -'ProcessWebhookRequest', -'Repository', -'RepositoryManagerClient', -'RunWorkflowCustomOperationMetadata', -'ServiceDirectoryConfig', -'UpdateConnectionRequest', -'UserCredential', -) diff --git 
a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json deleted file mode 100644 index 2e77ddd0..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json +++ /dev/null @@ -1,238 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.devtools.cloudbuild_v2", - "protoPackage": "google.devtools.cloudbuild.v2", - "schema": "1.0", - "services": { - "RepositoryManager": { - "clients": { - "grpc": { - "libraryClient": "RepositoryManagerClient", - "rpcs": { - "BatchCreateRepositories": { - "methods": [ - "batch_create_repositories" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateRepository": { - "methods": [ - "create_repository" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteRepository": { - "methods": [ - "delete_repository" - ] - }, - "FetchGitRefs": { - "methods": [ - "fetch_git_refs" - ] - }, - "FetchLinkableRepositories": { - "methods": [ - "fetch_linkable_repositories" - ] - }, - "FetchReadToken": { - "methods": [ - "fetch_read_token" - ] - }, - "FetchReadWriteToken": { - "methods": [ - "fetch_read_write_token" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetRepository": { - "methods": [ - "get_repository" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListRepositories": { - "methods": [ - "list_repositories" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - }, - "grpc-async": { - "libraryClient": "RepositoryManagerAsyncClient", - "rpcs": { - "BatchCreateRepositories": { - "methods": [ - "batch_create_repositories" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateRepository": { - "methods": [ - "create_repository" 
- ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteRepository": { - "methods": [ - "delete_repository" - ] - }, - "FetchGitRefs": { - "methods": [ - "fetch_git_refs" - ] - }, - "FetchLinkableRepositories": { - "methods": [ - "fetch_linkable_repositories" - ] - }, - "FetchReadToken": { - "methods": [ - "fetch_read_token" - ] - }, - "FetchReadWriteToken": { - "methods": [ - "fetch_read_write_token" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetRepository": { - "methods": [ - "get_repository" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListRepositories": { - "methods": [ - "list_repositories" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - }, - "rest": { - "libraryClient": "RepositoryManagerClient", - "rpcs": { - "BatchCreateRepositories": { - "methods": [ - "batch_create_repositories" - ] - }, - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "CreateRepository": { - "methods": [ - "create_repository" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "DeleteRepository": { - "methods": [ - "delete_repository" - ] - }, - "FetchGitRefs": { - "methods": [ - "fetch_git_refs" - ] - }, - "FetchLinkableRepositories": { - "methods": [ - "fetch_linkable_repositories" - ] - }, - "FetchReadToken": { - "methods": [ - "fetch_read_token" - ] - }, - "FetchReadWriteToken": { - "methods": [ - "fetch_read_write_token" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetRepository": { - "methods": [ - "get_repository" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "ListRepositories": { - "methods": [ - "list_repositories" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py 
b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed deleted file mode 100644 index 6070c14c..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-build package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py deleted file mode 100644 index 89a37dc9..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py deleted file mode 100644 index 4477dbda..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import RepositoryManagerClient -from .async_client import RepositoryManagerAsyncClient - -__all__ = ( - 'RepositoryManagerClient', - 'RepositoryManagerAsyncClient', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py deleted file mode 100644 index f0355efe..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/async_client.py +++ /dev/null @@ -1,2257 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers -from google.cloud.devtools.cloudbuild_v2.types import cloudbuild -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport -from .client import RepositoryManagerClient - - -class RepositoryManagerAsyncClient: - """Manages connections to source code repositories.""" - - _client: RepositoryManagerClient - - DEFAULT_ENDPOINT = RepositoryManagerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = RepositoryManagerClient.DEFAULT_MTLS_ENDPOINT - - connection_path = 
staticmethod(RepositoryManagerClient.connection_path) - parse_connection_path = staticmethod(RepositoryManagerClient.parse_connection_path) - repository_path = staticmethod(RepositoryManagerClient.repository_path) - parse_repository_path = staticmethod(RepositoryManagerClient.parse_repository_path) - secret_version_path = staticmethod(RepositoryManagerClient.secret_version_path) - parse_secret_version_path = staticmethod(RepositoryManagerClient.parse_secret_version_path) - service_path = staticmethod(RepositoryManagerClient.service_path) - parse_service_path = staticmethod(RepositoryManagerClient.parse_service_path) - common_billing_account_path = staticmethod(RepositoryManagerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(RepositoryManagerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(RepositoryManagerClient.common_folder_path) - parse_common_folder_path = staticmethod(RepositoryManagerClient.parse_common_folder_path) - common_organization_path = staticmethod(RepositoryManagerClient.common_organization_path) - parse_common_organization_path = staticmethod(RepositoryManagerClient.parse_common_organization_path) - common_project_path = staticmethod(RepositoryManagerClient.common_project_path) - parse_common_project_path = staticmethod(RepositoryManagerClient.parse_common_project_path) - common_location_path = staticmethod(RepositoryManagerClient.common_location_path) - parse_common_location_path = staticmethod(RepositoryManagerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - RepositoryManagerAsyncClient: The constructed client. 
- """ - return RepositoryManagerClient.from_service_account_info.__func__(RepositoryManagerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - RepositoryManagerAsyncClient: The constructed client. - """ - return RepositoryManagerClient.from_service_account_file.__func__(RepositoryManagerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. 
Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return RepositoryManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> RepositoryManagerTransport: - """Returns the transport used by the client instance. - - Returns: - RepositoryManagerTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(RepositoryManagerClient).get_transport_class, type(RepositoryManagerClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, RepositoryManagerTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the repository manager client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.RepositoryManagerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = RepositoryManagerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_connection(self, - request: Optional[Union[repositories.CreateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection: Optional[repositories.Connection] = None, - connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a Connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_create_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.CreateConnectionRequest( - parent="parent_value", - connection_id="connection_id_value", - ) - - # Make the request - operation = client.create_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest, dict]]): - The request object. Message for creating a Connection - parent (:class:`str`): - Required. Project and location where the connection will - be created. Format: ``projects/*/locations/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (:class:`google.cloud.devtools.cloudbuild_v2.types.Connection`): - Required. The Connection to create. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_id (:class:`str`): - Required. The ID to use for the Connection, which will - become the final component of the Connection's resource - name. Names must be unique per-project per-location. - Allows alphanumeric characters and any of - -._~%!$&'()*+,;=@. - - This corresponds to the ``connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, connection, connection_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.CreateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if connection is not None: - request.connection = connection - if connection_id is not None: - request.connection_id = connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - repositories.Connection, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def get_connection(self, - request: Optional[Union[repositories.GetConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.Connection: - r"""Gets details of a single connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_get_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest, dict]]): - The request object. Message for getting the details of a - Connection. - name (:class:`str`): - Required. The name of the Connection to retrieve. - Format: ``projects/*/locations/*/connections/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.types.Connection: - A connection to a SCM like GitHub, - GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.GetConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_connection, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_connections(self, - request: Optional[Union[repositories.ListConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListConnectionsAsyncPager: - r"""Lists Connections in a given project and location. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_list_connections(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest, dict]]): - The request object. Message for requesting list of - Connections. - parent (:class:`str`): - Required. The parent, which owns this collection of - Connections. Format: ``projects/*/locations/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsAsyncPager: - Message for response to listing - Connections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.ListConnectionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_connections, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListConnectionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_connection(self, - request: Optional[Union[repositories.UpdateConnectionRequest, dict]] = None, - *, - connection: Optional[repositories.Connection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a single connection. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_update_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.UpdateConnectionRequest( - ) - - # Make the request - operation = client.update_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest, dict]]): - The request object. Message for updating a Connection. - connection (:class:`google.cloud.devtools.cloudbuild_v2.types.Connection`): - Required. The Connection to update. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to be updated. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([connection, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.UpdateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if connection is not None: - request.connection = connection - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection.name", request.connection.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - repositories.Connection, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def delete_connection(self, - request: Optional[Union[repositories.DeleteConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_delete_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest, dict]]): - The request object. Message for deleting a Connection. - name (:class:`str`): - Required. The name of the Connection to delete. Format: - ``projects/*/locations/*/connections/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.DeleteConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_connection, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def create_repository(self, - request: Optional[Union[repositories.CreateRepositoryRequest, dict]] = None, - *, - parent: Optional[str] = None, - repository: Optional[repositories.Repository] = None, - repository_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a Repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_create_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - repository = cloudbuild_v2.Repository() - repository.remote_uri = "remote_uri_value" - - request = cloudbuild_v2.CreateRepositoryRequest( - parent="parent_value", - repository=repository, - repository_id="repository_id_value", - ) - - # Make the request - operation = client.create_repository(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest, dict]]): - The request object. Message for creating a Repository. - parent (:class:`str`): - Required. The connection to contain - the repository. If the request is part - of a BatchCreateRepositoriesRequest, - this field should be empty or match the - parent specified there. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - repository (:class:`google.cloud.devtools.cloudbuild_v2.types.Repository`): - Required. The repository to create. - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - repository_id (:class:`str`): - Required. The ID to use for the repository, which will - become the final component of the repository's resource - name. This ID should be unique in the connection. Allows - alphanumeric characters and any of -._~%!$&'()*+,;=@. - - This corresponds to the ``repository_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v2.types.Repository` - A repository associated to a parent connection. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, repository, repository_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.CreateRepositoryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if repository is not None: - request.repository = repository - if repository_id is not None: - request.repository_id = repository_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_repository, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - repositories.Repository, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - async def batch_create_repositories(self, - request: Optional[Union[repositories.BatchCreateRepositoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - requests: Optional[MutableSequence[repositories.CreateRepositoryRequest]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates multiple repositories inside a connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_batch_create_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - requests = cloudbuild_v2.CreateRepositoryRequest() - requests.parent = "parent_value" - requests.repository.remote_uri = "remote_uri_value" - requests.repository_id = "repository_id_value" - - request = cloudbuild_v2.BatchCreateRepositoriesRequest( - parent="parent_value", - requests=requests, - ) - - # Make the request - operation = client.batch_create_repositories(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest, dict]]): - The request object. Message for creating repositoritories - in batch. - parent (:class:`str`): - Required. The connection to contain all the repositories - being created. Format: - projects/\ */locations/*/connections/\* The parent field - in the CreateRepositoryRequest messages must either be - empty or match this field. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - requests (:class:`MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]`): - Required. The request messages - specifying the repositories to create. - - This corresponds to the ``requests`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesResponse` - Message for response of creating repositories in batch. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, requests]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.BatchCreateRepositoriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if requests: - request.requests.extend(requests) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.batch_create_repositories, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - repositories.BatchCreateRepositoriesResponse, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def get_repository(self, - request: Optional[Union[repositories.GetRepositoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.Repository: - r"""Gets details of a single repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_get_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetRepositoryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_repository(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest, dict]]): - The request object. Message for getting the details of a - Repository. - name (:class:`str`): - Required. The name of the Repository to retrieve. - Format: - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.types.Repository: - A repository associated to a parent - connection. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.GetRepositoryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_repository, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_repositories(self, - request: Optional[Union[repositories.ListRepositoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListRepositoriesAsyncPager: - r"""Lists Repositories in a given connection. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_list_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListRepositoriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_repositories(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest, dict]]): - The request object. Message for requesting list of - Repositories. - parent (:class:`str`): - Required. The parent, which owns this collection of - Repositories. Format: - ``projects/*/locations/*/connections/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesAsyncPager: - Message for response to listing - Repositories. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.ListRepositoriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_repositories, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListRepositoriesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_repository(self, - request: Optional[Union[repositories.DeleteRepositoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single repository. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_delete_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteRepositoryRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_repository(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest, dict]]): - The request object. Message for deleting a Repository. - name (:class:`str`): - Required. The name of the Repository to delete. Format: - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.DeleteRepositoryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_repository, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - async def fetch_read_write_token(self, - request: Optional[Union[repositories.FetchReadWriteTokenRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchReadWriteTokenResponse: - r"""Fetches read/write token of a given repository. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_fetch_read_write_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadWriteTokenRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_read_write_token(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest, dict]]): - The request object. Message for fetching SCM read/write - token. - repository (:class:`str`): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse: - Message for responding to get - read/write token. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.FetchReadWriteTokenRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_read_write_token, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def fetch_read_token(self, - request: Optional[Union[repositories.FetchReadTokenRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchReadTokenResponse: - r"""Fetches read token of a given repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_fetch_read_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadTokenRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_read_token(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest, dict]]): - The request object. Message for fetching SCM read token. - repository (:class:`str`): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse: - Message for responding to get read - token. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.FetchReadTokenRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_read_token, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def fetch_linkable_repositories(self, - request: Optional[Union[repositories.FetchLinkableRepositoriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.FetchLinkableRepositoriesAsyncPager: - r"""FetchLinkableRepositories get repositories from SCM - that are accessible and could be added to the - connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_fetch_linkable_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchLinkableRepositoriesRequest( - connection="connection_value", - ) - - # Make the request - page_result = client.fetch_linkable_repositories(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest, dict]]): - The request object. Request message for - FetchLinkableRepositories. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesAsyncPager: - Response message for - FetchLinkableRepositories. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - request = repositories.FetchLinkableRepositoriesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_linkable_repositories, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection", request.connection), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.FetchLinkableRepositoriesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def fetch_git_refs(self, - request: Optional[Union[repositories.FetchGitRefsRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchGitRefsResponse: - r"""Fetch the list of branches or tags for a given - repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - async def sample_fetch_git_refs(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchGitRefsRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_git_refs(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest, dict]]): - The request object. Request for fetching git refs - repository (:class:`str`): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse: - Response for fetching git refs - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = repositories.FetchGitRefsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.fetch_git_refs, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "RepositoryManagerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "RepositoryManagerAsyncClient", -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py deleted file mode 100644 index 79743795..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py +++ /dev/null @@ -1,2445 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers -from google.cloud.devtools.cloudbuild_v2.types import cloudbuild -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import RepositoryManagerGrpcTransport -from .transports.grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport -from .transports.rest import RepositoryManagerRestTransport - - -class 
RepositoryManagerClientMeta(type): - """Metaclass for the RepositoryManager client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[RepositoryManagerTransport]] - _transport_registry["grpc"] = RepositoryManagerGrpcTransport - _transport_registry["grpc_asyncio"] = RepositoryManagerGrpcAsyncIOTransport - _transport_registry["rest"] = RepositoryManagerRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[RepositoryManagerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class RepositoryManagerClient(metaclass=RepositoryManagerClientMeta): - """Manages connections to source code repositories.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - RepositoryManagerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - RepositoryManagerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> RepositoryManagerTransport: - """Returns the transport used by the client instance. - - Returns: - RepositoryManagerTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def connection_path(project: str,location: str,connection: str,) -> str: - """Returns a fully-qualified connection string.""" - return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - - @staticmethod - def parse_connection_path(path: str) -> Dict[str,str]: - """Parses a connection path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def repository_path(project: str,location: str,connection: str,repository: str,) -> str: - """Returns a fully-qualified repository string.""" - return "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) - - @staticmethod - def parse_repository_path(path: str) -> Dict[str,str]: - """Parses a repository path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)/repositories/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def secret_version_path(project: str,secret: str,version: str,) -> str: - """Returns a fully-qualified secret_version string.""" - return "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) - - @staticmethod - def parse_secret_version_path(path: str) -> Dict[str,str]: - """Parses a secret_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/secrets/(?P.+?)/versions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def service_path(project: str,location: str,namespace: str,service: str,) -> str: - """Returns a fully-qualified service string.""" - return 
"projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format(project=project, location=location, namespace=namespace, service=service, ) - - @staticmethod - def parse_service_path(path: str) -> Dict[str,str]: - """Parses a service path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/namespaces/(?P.+?)/services/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project 
path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RepositoryManagerTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the repository manager client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- transport (Union[str, RepositoryManagerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, RepositoryManagerTransport): - # transport is a RepositoryManagerTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_connection(self, - request: Optional[Union[repositories.CreateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection: Optional[repositories.Connection] = None, - connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a Connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_create_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.CreateConnectionRequest( - parent="parent_value", - connection_id="connection_id_value", - ) - - # Make the request - operation = client.create_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest, dict]): - The request object. Message for creating a Connection - parent (str): - Required. Project and location where the connection will - be created. Format: ``projects/*/locations/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (google.cloud.devtools.cloudbuild_v2.types.Connection): - Required. The Connection to create. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_id (str): - Required. The ID to use for the Connection, which will - become the final component of the Connection's resource - name. Names must be unique per-project per-location. - Allows alphanumeric characters and any of - -._~%!$&'()*+,;=@. - - This corresponds to the ``connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, connection, connection_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.CreateConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.CreateConnectionRequest): - request = repositories.CreateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if connection is not None: - request.connection = connection - if connection_id is not None: - request.connection_id = connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - repositories.Connection, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_connection(self, - request: Optional[Union[repositories.GetConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.Connection: - r"""Gets details of a single connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_get_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest, dict]): - The request object. Message for getting the details of a - Connection. - name (str): - Required. The name of the Connection to retrieve. - Format: ``projects/*/locations/*/connections/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.Connection: - A connection to a SCM like GitHub, - GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.GetConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.GetConnectionRequest): - request = repositories.GetConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_connections(self, - request: Optional[Union[repositories.ListConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListConnectionsPager: - r"""Lists Connections in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_list_connections(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest, dict]): - The request object. Message for requesting list of - Connections. - parent (str): - Required. The parent, which owns this collection of - Connections. Format: ``projects/*/locations/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsPager: - Message for response to listing - Connections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.ListConnectionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.ListConnectionsRequest): - request = repositories.ListConnectionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListConnectionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_connection(self, - request: Optional[Union[repositories.UpdateConnectionRequest, dict]] = None, - *, - connection: Optional[repositories.Connection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a single connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_update_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.UpdateConnectionRequest( - ) - - # Make the request - operation = client.update_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest, dict]): - The request object. Message for updating a Connection. - connection (google.cloud.devtools.cloudbuild_v2.types.Connection): - Required. The Connection to update. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.devtools.cloudbuild_v2.types.Connection` A connection to a SCM like GitHub, GitHub Enterprise, Bitbucket Server or - GitLab. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([connection, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.UpdateConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.UpdateConnectionRequest): - request = repositories.UpdateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if connection is not None: - request.connection = connection - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection.name", request.connection.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - repositories.Connection, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_connection(self, - request: Optional[Union[repositories.DeleteConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_delete_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest, dict]): - The request object. Message for deleting a Connection. - name (str): - Required. The name of the Connection to delete. Format: - ``projects/*/locations/*/connections/*``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.DeleteConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.DeleteConnectionRequest): - request = repositories.DeleteConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - def create_repository(self, - request: Optional[Union[repositories.CreateRepositoryRequest, dict]] = None, - *, - parent: Optional[str] = None, - repository: Optional[repositories.Repository] = None, - repository_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a Repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_create_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - repository = cloudbuild_v2.Repository() - repository.remote_uri = "remote_uri_value" - - request = cloudbuild_v2.CreateRepositoryRequest( - parent="parent_value", - repository=repository, - repository_id="repository_id_value", - ) - - # Make the request - operation = client.create_repository(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest, dict]): - The request object. Message for creating a Repository. - parent (str): - Required. The connection to contain - the repository. If the request is part - of a BatchCreateRepositoriesRequest, - this field should be empty or match the - parent specified there. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - repository (google.cloud.devtools.cloudbuild_v2.types.Repository): - Required. The repository to create. - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - repository_id (str): - Required. The ID to use for the repository, which will - become the final component of the repository's resource - name. This ID should be unique in the connection. Allows - alphanumeric characters and any of -._~%!$&'()*+,;=@. - - This corresponds to the ``repository_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v2.types.Repository` - A repository associated to a parent connection. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, repository, repository_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.CreateRepositoryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.CreateRepositoryRequest): - request = repositories.CreateRepositoryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if repository is not None: - request.repository = repository - if repository_id is not None: - request.repository_id = repository_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_repository] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - repositories.Repository, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - def batch_create_repositories(self, - request: Optional[Union[repositories.BatchCreateRepositoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - requests: Optional[MutableSequence[repositories.CreateRepositoryRequest]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates multiple repositories inside a connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_batch_create_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - requests = cloudbuild_v2.CreateRepositoryRequest() - requests.parent = "parent_value" - requests.repository.remote_uri = "remote_uri_value" - requests.repository_id = "repository_id_value" - - request = cloudbuild_v2.BatchCreateRepositoriesRequest( - parent="parent_value", - requests=requests, - ) - - # Make the request - operation = client.batch_create_repositories(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest, dict]): - The request object. Message for creating repositoritories - in batch. - parent (str): - Required. The connection to contain all the repositories - being created. Format: - projects/\ */locations/*/connections/\* The parent field - in the CreateRepositoryRequest messages must either be - empty or match this field. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - requests (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]): - Required. The request messages - specifying the repositories to create. - - This corresponds to the ``requests`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesResponse` - Message for response of creating repositories in batch. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, requests]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.BatchCreateRepositoriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.BatchCreateRepositoriesRequest): - request = repositories.BatchCreateRepositoriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if requests is not None: - request.requests = requests - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_create_repositories] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - repositories.BatchCreateRepositoriesResponse, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_repository(self, - request: Optional[Union[repositories.GetRepositoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.Repository: - r"""Gets details of a single repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_get_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetRepositoryRequest( - name="name_value", - ) - - # Make the request - response = client.get_repository(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest, dict]): - The request object. Message for getting the details of a - Repository. - name (str): - Required. The name of the Repository to retrieve. - Format: - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.Repository: - A repository associated to a parent - connection. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.GetRepositoryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.GetRepositoryRequest): - request = repositories.GetRepositoryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_repository] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_repositories(self, - request: Optional[Union[repositories.ListRepositoriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListRepositoriesPager: - r"""Lists Repositories in a given connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_list_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListRepositoriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_repositories(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest, dict]): - The request object. Message for requesting list of - Repositories. - parent (str): - Required. The parent, which owns this collection of - Repositories. Format: - ``projects/*/locations/*/connections/*``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesPager: - Message for response to listing - Repositories. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.ListRepositoriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.ListRepositoriesRequest): - request = repositories.ListRepositoriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_repositories] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListRepositoriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_repository(self, - request: Optional[Union[repositories.DeleteRepositoryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_delete_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteRepositoryRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_repository(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest, dict]): - The request object. Message for deleting a Repository. - name (str): - Required. The name of the Repository to delete. Format: - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.DeleteRepositoryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.DeleteRepositoryRequest): - request = repositories.DeleteRepositoryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_repository] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloudbuild.OperationMetadata, - ) - - # Done; return the response. - return response - - def fetch_read_write_token(self, - request: Optional[Union[repositories.FetchReadWriteTokenRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchReadWriteTokenResponse: - r"""Fetches read/write token of a given repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_fetch_read_write_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadWriteTokenRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_read_write_token(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest, dict]): - The request object. Message for fetching SCM read/write - token. - repository (str): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse: - Message for responding to get - read/write token. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.FetchReadWriteTokenRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.FetchReadWriteTokenRequest): - request = repositories.FetchReadWriteTokenRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_read_write_token] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def fetch_read_token(self, - request: Optional[Union[repositories.FetchReadTokenRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchReadTokenResponse: - r"""Fetches read token of a given repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_fetch_read_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadTokenRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_read_token(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest, dict]): - The request object. Message for fetching SCM read token. - repository (str): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse: - Message for responding to get read - token. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.FetchReadTokenRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.FetchReadTokenRequest): - request = repositories.FetchReadTokenRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_read_token] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def fetch_linkable_repositories(self, - request: Optional[Union[repositories.FetchLinkableRepositoriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.FetchLinkableRepositoriesPager: - r"""FetchLinkableRepositories get repositories from SCM - that are accessible and could be added to the - connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_fetch_linkable_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchLinkableRepositoriesRequest( - connection="connection_value", - ) - - # Make the request - page_result = client.fetch_linkable_repositories(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest, dict]): - The request object. Request message for - FetchLinkableRepositories. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesPager: - Response message for - FetchLinkableRepositories. 
- Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a repositories.FetchLinkableRepositoriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, repositories.FetchLinkableRepositoriesRequest): - request = repositories.FetchLinkableRepositoriesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_linkable_repositories] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("connection", request.connection), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.FetchLinkableRepositoriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def fetch_git_refs(self, - request: Optional[Union[repositories.FetchGitRefsRequest, dict]] = None, - *, - repository: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> repositories.FetchGitRefsResponse: - r"""Fetch the list of branches or tags for a given - repository. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.devtools import cloudbuild_v2 - - def sample_fetch_git_refs(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchGitRefsRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_git_refs(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest, dict]): - The request object. Request for fetching git refs - repository (str): - Required. The resource name of the repository in the - format - ``projects/*/locations/*/connections/*/repositories/*``. - - This corresponds to the ``repository`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse: - Response for fetching git refs - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([repository]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a repositories.FetchGitRefsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, repositories.FetchGitRefsRequest): - request = repositories.FetchGitRefsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if repository is not None: - request.repository = repository - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_git_refs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("repository", request.repository), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "RepositoryManagerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). 
A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "RepositoryManagerClient", -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py deleted file mode 100644 index 1ae879b6..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/pagers.py +++ /dev/null @@ -1,381 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.devtools.cloudbuild_v2.types import repositories - - -class ListConnectionsPager: - """A pager for iterating through ``list_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``connections`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., repositories.ListConnectionsResponse], - request: repositories.ListConnectionsRequest, - response: repositories.ListConnectionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = repositories.ListConnectionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[repositories.ListConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[repositories.Connection]: - for page in self.pages: - yield from page.connections - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConnectionsAsyncPager: - """A pager for iterating through ``list_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``connections`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[repositories.ListConnectionsResponse]], - request: repositories.ListConnectionsRequest, - response: repositories.ListConnectionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.ListConnectionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = repositories.ListConnectionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[repositories.ListConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[repositories.Connection]: - async def async_generator(): - async for page in self.pages: - for response in page.connections: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListRepositoriesPager: - """A pager for iterating through ``list_repositories`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``repositories`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListRepositories`` requests and continue to iterate - through the ``repositories`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., repositories.ListRepositoriesResponse], - request: repositories.ListRepositoriesRequest, - response: repositories.ListRepositoriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = repositories.ListRepositoriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[repositories.ListRepositoriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[repositories.Repository]: - for page in self.pages: - yield from page.repositories - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListRepositoriesAsyncPager: - """A pager for iterating through ``list_repositories`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``repositories`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListRepositories`` requests and continue to iterate - through the ``repositories`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[repositories.ListRepositoriesResponse]], - request: repositories.ListRepositoriesRequest, - response: repositories.ListRepositoriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = repositories.ListRepositoriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[repositories.ListRepositoriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[repositories.Repository]: - async def async_generator(): - async for page in self.pages: - for response in page.repositories: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class FetchLinkableRepositoriesPager: - """A pager for iterating through ``fetch_linkable_repositories`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``repositories`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``FetchLinkableRepositories`` requests and continue to iterate - through the ``repositories`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., repositories.FetchLinkableRepositoriesResponse], - request: repositories.FetchLinkableRepositoriesRequest, - response: repositories.FetchLinkableRepositoriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = repositories.FetchLinkableRepositoriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[repositories.FetchLinkableRepositoriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[repositories.Repository]: - for page in self.pages: - yield from page.repositories - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class FetchLinkableRepositoriesAsyncPager: - """A pager for iterating through ``fetch_linkable_repositories`` requests. - - This class thinly wraps an initial - :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``repositories`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``FetchLinkableRepositories`` requests and continue to iterate - through the ``repositories`` field on the - corresponding responses. - - All the usual :class:`google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[repositories.FetchLinkableRepositoriesResponse]], - request: repositories.FetchLinkableRepositoriesRequest, - response: repositories.FetchLinkableRepositoriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest): - The initial request object. - response (google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = repositories.FetchLinkableRepositoriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[repositories.FetchLinkableRepositoriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[repositories.Repository]: - async def async_generator(): - async for page in self.pages: - for response in page.repositories: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py deleted file mode 100644 index b912a799..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import RepositoryManagerTransport -from .grpc import RepositoryManagerGrpcTransport -from .grpc_asyncio import RepositoryManagerGrpcAsyncIOTransport -from .rest import RepositoryManagerRestTransport -from .rest import RepositoryManagerRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[RepositoryManagerTransport]] -_transport_registry['grpc'] = RepositoryManagerGrpcTransport -_transport_registry['grpc_asyncio'] = RepositoryManagerGrpcAsyncIOTransport -_transport_registry['rest'] = RepositoryManagerRestTransport - -__all__ = ( - 'RepositoryManagerTransport', - 'RepositoryManagerGrpcTransport', - 'RepositoryManagerGrpcAsyncIOTransport', - 'RepositoryManagerRestTransport', - 'RepositoryManagerRestInterceptor', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py deleted file mode 100644 index 28b1d7a8..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/base.py +++ /dev/null @@ -1,431 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class RepositoryManagerTransport(abc.ABC): - """Abstract transport class for RepositoryManager.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudbuild.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] 
= False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_connection: gapic_v1.method.wrap_method( - self.create_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.get_connection: gapic_v1.method.wrap_method( - self.get_connection, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_connections: gapic_v1.method.wrap_method( - self.list_connections, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_connection: gapic_v1.method.wrap_method( - self.update_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_connection: gapic_v1.method.wrap_method( - self.delete_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.create_repository: gapic_v1.method.wrap_method( - self.create_repository, - default_timeout=60.0, - client_info=client_info, - ), - self.batch_create_repositories: gapic_v1.method.wrap_method( - 
self.batch_create_repositories, - default_timeout=None, - client_info=client_info, - ), - self.get_repository: gapic_v1.method.wrap_method( - self.get_repository, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_repositories: gapic_v1.method.wrap_method( - self.list_repositories, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_repository: gapic_v1.method.wrap_method( - self.delete_repository, - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_read_write_token: gapic_v1.method.wrap_method( - self.fetch_read_write_token, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_read_token: gapic_v1.method.wrap_method( - self.fetch_read_token, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_linkable_repositories: gapic_v1.method.wrap_method( - self.fetch_linkable_repositories, - default_retry=retries.Retry( -initial=1.0,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_git_refs: gapic_v1.method.wrap_method( - self.fetch_git_refs, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated 
with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_connection(self) -> Callable[ - [repositories.CreateConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_connection(self) -> Callable[ - [repositories.GetConnectionRequest], - Union[ - repositories.Connection, - Awaitable[repositories.Connection] - ]]: - raise NotImplementedError() - - @property - def list_connections(self) -> Callable[ - [repositories.ListConnectionsRequest], - Union[ - repositories.ListConnectionsResponse, - Awaitable[repositories.ListConnectionsResponse] - ]]: - raise NotImplementedError() - - @property - def update_connection(self) -> Callable[ - [repositories.UpdateConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_connection(self) -> Callable[ - [repositories.DeleteConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_repository(self) -> Callable[ - [repositories.CreateRepositoryRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def batch_create_repositories(self) -> Callable[ - [repositories.BatchCreateRepositoriesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_repository(self) -> Callable[ - [repositories.GetRepositoryRequest], - Union[ - repositories.Repository, - 
Awaitable[repositories.Repository] - ]]: - raise NotImplementedError() - - @property - def list_repositories(self) -> Callable[ - [repositories.ListRepositoriesRequest], - Union[ - repositories.ListRepositoriesResponse, - Awaitable[repositories.ListRepositoriesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_repository(self) -> Callable[ - [repositories.DeleteRepositoryRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def fetch_read_write_token(self) -> Callable[ - [repositories.FetchReadWriteTokenRequest], - Union[ - repositories.FetchReadWriteTokenResponse, - Awaitable[repositories.FetchReadWriteTokenResponse] - ]]: - raise NotImplementedError() - - @property - def fetch_read_token(self) -> Callable[ - [repositories.FetchReadTokenRequest], - Union[ - repositories.FetchReadTokenResponse, - Awaitable[repositories.FetchReadTokenResponse] - ]]: - raise NotImplementedError() - - @property - def fetch_linkable_repositories(self) -> Callable[ - [repositories.FetchLinkableRepositoriesRequest], - Union[ - repositories.FetchLinkableRepositoriesResponse, - Awaitable[repositories.FetchLinkableRepositoriesResponse] - ]]: - raise NotImplementedError() - - @property - def fetch_git_refs(self) -> Callable[ - [repositories.FetchGitRefsRequest], - Union[ - repositories.FetchGitRefsResponse, - Awaitable[repositories.FetchGitRefsResponse] - ]]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, 
Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'RepositoryManagerTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py deleted file mode 100644 index 2507acae..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc.py +++ /dev/null @@ -1,743 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO - - -class RepositoryManagerGrpcTransport(RepositoryManagerTransport): - """gRPC backend transport for RepositoryManager. - - Manages connections to source code repositories. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. 
- self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. 
- return self._operations_client - - @property - def create_connection(self) -> Callable[ - [repositories.CreateConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the create connection method over gRPC. - - Creates a Connection. - - Returns: - Callable[[~.CreateConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection' not in self._stubs: - self._stubs['create_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/CreateConnection', - request_serializer=repositories.CreateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_connection'] - - @property - def get_connection(self) -> Callable[ - [repositories.GetConnectionRequest], - repositories.Connection]: - r"""Return a callable for the get connection method over gRPC. - - Gets details of a single connection. - - Returns: - Callable[[~.GetConnectionRequest], - ~.Connection]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_connection' not in self._stubs: - self._stubs['get_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/GetConnection', - request_serializer=repositories.GetConnectionRequest.serialize, - response_deserializer=repositories.Connection.deserialize, - ) - return self._stubs['get_connection'] - - @property - def list_connections(self) -> Callable[ - [repositories.ListConnectionsRequest], - repositories.ListConnectionsResponse]: - r"""Return a callable for the list connections method over gRPC. - - Lists Connections in a given project and location. - - Returns: - Callable[[~.ListConnectionsRequest], - ~.ListConnectionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_connections' not in self._stubs: - self._stubs['list_connections'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/ListConnections', - request_serializer=repositories.ListConnectionsRequest.serialize, - response_deserializer=repositories.ListConnectionsResponse.deserialize, - ) - return self._stubs['list_connections'] - - @property - def update_connection(self) -> Callable[ - [repositories.UpdateConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the update connection method over gRPC. - - Updates a single connection. - - Returns: - Callable[[~.UpdateConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_connection' not in self._stubs: - self._stubs['update_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/UpdateConnection', - request_serializer=repositories.UpdateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_connection'] - - @property - def delete_connection(self) -> Callable[ - [repositories.DeleteConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete connection method over gRPC. - - Deletes a single connection. - - Returns: - Callable[[~.DeleteConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection' not in self._stubs: - self._stubs['delete_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteConnection', - request_serializer=repositories.DeleteConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_connection'] - - @property - def create_repository(self) -> Callable[ - [repositories.CreateRepositoryRequest], - operations_pb2.Operation]: - r"""Return a callable for the create repository method over gRPC. - - Creates a Repository. - - Returns: - Callable[[~.CreateRepositoryRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_repository' not in self._stubs: - self._stubs['create_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/CreateRepository', - request_serializer=repositories.CreateRepositoryRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_repository'] - - @property - def batch_create_repositories(self) -> Callable[ - [repositories.BatchCreateRepositoriesRequest], - operations_pb2.Operation]: - r"""Return a callable for the batch create repositories method over gRPC. - - Creates multiple repositories inside a connection. - - Returns: - Callable[[~.BatchCreateRepositoriesRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_repositories' not in self._stubs: - self._stubs['batch_create_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/BatchCreateRepositories', - request_serializer=repositories.BatchCreateRepositoriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['batch_create_repositories'] - - @property - def get_repository(self) -> Callable[ - [repositories.GetRepositoryRequest], - repositories.Repository]: - r"""Return a callable for the get repository method over gRPC. - - Gets details of a single repository. - - Returns: - Callable[[~.GetRepositoryRequest], - ~.Repository]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_repository' not in self._stubs: - self._stubs['get_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/GetRepository', - request_serializer=repositories.GetRepositoryRequest.serialize, - response_deserializer=repositories.Repository.deserialize, - ) - return self._stubs['get_repository'] - - @property - def list_repositories(self) -> Callable[ - [repositories.ListRepositoriesRequest], - repositories.ListRepositoriesResponse]: - r"""Return a callable for the list repositories method over gRPC. - - Lists Repositories in a given connection. - - Returns: - Callable[[~.ListRepositoriesRequest], - ~.ListRepositoriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_repositories' not in self._stubs: - self._stubs['list_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/ListRepositories', - request_serializer=repositories.ListRepositoriesRequest.serialize, - response_deserializer=repositories.ListRepositoriesResponse.deserialize, - ) - return self._stubs['list_repositories'] - - @property - def delete_repository(self) -> Callable[ - [repositories.DeleteRepositoryRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete repository method over gRPC. - - Deletes a single repository. - - Returns: - Callable[[~.DeleteRepositoryRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_repository' not in self._stubs: - self._stubs['delete_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteRepository', - request_serializer=repositories.DeleteRepositoryRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_repository'] - - @property - def fetch_read_write_token(self) -> Callable[ - [repositories.FetchReadWriteTokenRequest], - repositories.FetchReadWriteTokenResponse]: - r"""Return a callable for the fetch read write token method over gRPC. - - Fetches read/write token of a given repository. - - Returns: - Callable[[~.FetchReadWriteTokenRequest], - ~.FetchReadWriteTokenResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_read_write_token' not in self._stubs: - self._stubs['fetch_read_write_token'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadWriteToken', - request_serializer=repositories.FetchReadWriteTokenRequest.serialize, - response_deserializer=repositories.FetchReadWriteTokenResponse.deserialize, - ) - return self._stubs['fetch_read_write_token'] - - @property - def fetch_read_token(self) -> Callable[ - [repositories.FetchReadTokenRequest], - repositories.FetchReadTokenResponse]: - r"""Return a callable for the fetch read token method over gRPC. - - Fetches read token of a given repository. - - Returns: - Callable[[~.FetchReadTokenRequest], - ~.FetchReadTokenResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_read_token' not in self._stubs: - self._stubs['fetch_read_token'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadToken', - request_serializer=repositories.FetchReadTokenRequest.serialize, - response_deserializer=repositories.FetchReadTokenResponse.deserialize, - ) - return self._stubs['fetch_read_token'] - - @property - def fetch_linkable_repositories(self) -> Callable[ - [repositories.FetchLinkableRepositoriesRequest], - repositories.FetchLinkableRepositoriesResponse]: - r"""Return a callable for the fetch linkable repositories method over gRPC. - - FetchLinkableRepositories get repositories from SCM - that are accessible and could be added to the - connection. - - Returns: - Callable[[~.FetchLinkableRepositoriesRequest], - ~.FetchLinkableRepositoriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_linkable_repositories' not in self._stubs: - self._stubs['fetch_linkable_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchLinkableRepositories', - request_serializer=repositories.FetchLinkableRepositoriesRequest.serialize, - response_deserializer=repositories.FetchLinkableRepositoriesResponse.deserialize, - ) - return self._stubs['fetch_linkable_repositories'] - - @property - def fetch_git_refs(self) -> Callable[ - [repositories.FetchGitRefsRequest], - repositories.FetchGitRefsResponse]: - r"""Return a callable for the fetch git refs method over gRPC. - - Fetch the list of branches or tags for a given - repository. 
- - Returns: - Callable[[~.FetchGitRefsRequest], - ~.FetchGitRefsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_git_refs' not in self._stubs: - self._stubs['fetch_git_refs'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchGitRefs', - request_serializer=repositories.FetchGitRefsRequest.serialize, - response_deserializer=repositories.FetchGitRefsResponse.deserialize, - ) - return self._stubs['fetch_git_refs'] - - def close(self): - self.grpc_channel.close() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'RepositoryManagerGrpcTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py deleted file mode 100644 index 55d562a9..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/grpc_asyncio.py +++ /dev/null @@ -1,742 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO -from .grpc import RepositoryManagerGrpcTransport - - -class RepositoryManagerGrpcAsyncIOTransport(RepositoryManagerTransport): - """gRPC AsyncIO backend transport for RepositoryManager. - - Manages connections to source code repositories. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. 
- """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_connection(self) -> Callable[ - [repositories.CreateConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create connection method over gRPC. - - Creates a Connection. - - Returns: - Callable[[~.CreateConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection' not in self._stubs: - self._stubs['create_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/CreateConnection', - request_serializer=repositories.CreateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_connection'] - - @property - def get_connection(self) -> Callable[ - [repositories.GetConnectionRequest], - Awaitable[repositories.Connection]]: - r"""Return a callable for the get connection method over gRPC. - - Gets details of a single connection. - - Returns: - Callable[[~.GetConnectionRequest], - Awaitable[~.Connection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_connection' not in self._stubs: - self._stubs['get_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/GetConnection', - request_serializer=repositories.GetConnectionRequest.serialize, - response_deserializer=repositories.Connection.deserialize, - ) - return self._stubs['get_connection'] - - @property - def list_connections(self) -> Callable[ - [repositories.ListConnectionsRequest], - Awaitable[repositories.ListConnectionsResponse]]: - r"""Return a callable for the list connections method over gRPC. - - Lists Connections in a given project and location. - - Returns: - Callable[[~.ListConnectionsRequest], - Awaitable[~.ListConnectionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_connections' not in self._stubs: - self._stubs['list_connections'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/ListConnections', - request_serializer=repositories.ListConnectionsRequest.serialize, - response_deserializer=repositories.ListConnectionsResponse.deserialize, - ) - return self._stubs['list_connections'] - - @property - def update_connection(self) -> Callable[ - [repositories.UpdateConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update connection method over gRPC. - - Updates a single connection. - - Returns: - Callable[[~.UpdateConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_connection' not in self._stubs: - self._stubs['update_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/UpdateConnection', - request_serializer=repositories.UpdateConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_connection'] - - @property - def delete_connection(self) -> Callable[ - [repositories.DeleteConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete connection method over gRPC. - - Deletes a single connection. - - Returns: - Callable[[~.DeleteConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection' not in self._stubs: - self._stubs['delete_connection'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteConnection', - request_serializer=repositories.DeleteConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_connection'] - - @property - def create_repository(self) -> Callable[ - [repositories.CreateRepositoryRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create repository method over gRPC. - - Creates a Repository. - - Returns: - Callable[[~.CreateRepositoryRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_repository' not in self._stubs: - self._stubs['create_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/CreateRepository', - request_serializer=repositories.CreateRepositoryRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_repository'] - - @property - def batch_create_repositories(self) -> Callable[ - [repositories.BatchCreateRepositoriesRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the batch create repositories method over gRPC. - - Creates multiple repositories inside a connection. - - Returns: - Callable[[~.BatchCreateRepositoriesRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_repositories' not in self._stubs: - self._stubs['batch_create_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/BatchCreateRepositories', - request_serializer=repositories.BatchCreateRepositoriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['batch_create_repositories'] - - @property - def get_repository(self) -> Callable[ - [repositories.GetRepositoryRequest], - Awaitable[repositories.Repository]]: - r"""Return a callable for the get repository method over gRPC. - - Gets details of a single repository. - - Returns: - Callable[[~.GetRepositoryRequest], - Awaitable[~.Repository]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_repository' not in self._stubs: - self._stubs['get_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/GetRepository', - request_serializer=repositories.GetRepositoryRequest.serialize, - response_deserializer=repositories.Repository.deserialize, - ) - return self._stubs['get_repository'] - - @property - def list_repositories(self) -> Callable[ - [repositories.ListRepositoriesRequest], - Awaitable[repositories.ListRepositoriesResponse]]: - r"""Return a callable for the list repositories method over gRPC. - - Lists Repositories in a given connection. - - Returns: - Callable[[~.ListRepositoriesRequest], - Awaitable[~.ListRepositoriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_repositories' not in self._stubs: - self._stubs['list_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/ListRepositories', - request_serializer=repositories.ListRepositoriesRequest.serialize, - response_deserializer=repositories.ListRepositoriesResponse.deserialize, - ) - return self._stubs['list_repositories'] - - @property - def delete_repository(self) -> Callable[ - [repositories.DeleteRepositoryRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete repository method over gRPC. - - Deletes a single repository. - - Returns: - Callable[[~.DeleteRepositoryRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_repository' not in self._stubs: - self._stubs['delete_repository'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/DeleteRepository', - request_serializer=repositories.DeleteRepositoryRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_repository'] - - @property - def fetch_read_write_token(self) -> Callable[ - [repositories.FetchReadWriteTokenRequest], - Awaitable[repositories.FetchReadWriteTokenResponse]]: - r"""Return a callable for the fetch read write token method over gRPC. - - Fetches read/write token of a given repository. - - Returns: - Callable[[~.FetchReadWriteTokenRequest], - Awaitable[~.FetchReadWriteTokenResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_read_write_token' not in self._stubs: - self._stubs['fetch_read_write_token'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadWriteToken', - request_serializer=repositories.FetchReadWriteTokenRequest.serialize, - response_deserializer=repositories.FetchReadWriteTokenResponse.deserialize, - ) - return self._stubs['fetch_read_write_token'] - - @property - def fetch_read_token(self) -> Callable[ - [repositories.FetchReadTokenRequest], - Awaitable[repositories.FetchReadTokenResponse]]: - r"""Return a callable for the fetch read token method over gRPC. - - Fetches read token of a given repository. - - Returns: - Callable[[~.FetchReadTokenRequest], - Awaitable[~.FetchReadTokenResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_read_token' not in self._stubs: - self._stubs['fetch_read_token'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchReadToken', - request_serializer=repositories.FetchReadTokenRequest.serialize, - response_deserializer=repositories.FetchReadTokenResponse.deserialize, - ) - return self._stubs['fetch_read_token'] - - @property - def fetch_linkable_repositories(self) -> Callable[ - [repositories.FetchLinkableRepositoriesRequest], - Awaitable[repositories.FetchLinkableRepositoriesResponse]]: - r"""Return a callable for the fetch linkable repositories method over gRPC. - - FetchLinkableRepositories get repositories from SCM - that are accessible and could be added to the - connection. - - Returns: - Callable[[~.FetchLinkableRepositoriesRequest], - Awaitable[~.FetchLinkableRepositoriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_linkable_repositories' not in self._stubs: - self._stubs['fetch_linkable_repositories'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchLinkableRepositories', - request_serializer=repositories.FetchLinkableRepositoriesRequest.serialize, - response_deserializer=repositories.FetchLinkableRepositoriesResponse.deserialize, - ) - return self._stubs['fetch_linkable_repositories'] - - @property - def fetch_git_refs(self) -> Callable[ - [repositories.FetchGitRefsRequest], - Awaitable[repositories.FetchGitRefsResponse]]: - r"""Return a callable for the fetch git refs method over gRPC. - - Fetch the list of branches or tags for a given - repository. 
- - Returns: - Callable[[~.FetchGitRefsRequest], - Awaitable[~.FetchGitRefsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_git_refs' not in self._stubs: - self._stubs['fetch_git_refs'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v2.RepositoryManager/FetchGitRefs', - request_serializer=repositories.FetchGitRefsRequest.serialize, - response_deserializer=repositories.FetchGitRefsResponse.deserialize, - ) - return self._stubs['fetch_git_refs'] - - def close(self): - return self.grpc_channel.close() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'RepositoryManagerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py deleted file mode 100644 index 365c9861..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py +++ /dev/null @@ -1,2275 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.longrunning import operations_pb2 # type: ignore - -from .base import RepositoryManagerTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class RepositoryManagerRestInterceptor: - """Interceptor for RepositoryManager. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the RepositoryManagerRestTransport. - - .. code-block:: python - class MyCustomRepositoryManagerInterceptor(RepositoryManagerRestInterceptor): - def pre_batch_create_repositories(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_create_repositories(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_repository(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_repository(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_repository(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_repository(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_git_refs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_fetch_git_refs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_linkable_repositories(self, request, metadata): - logging.log(f"Received request: {request}") - return 
request, metadata - - def post_fetch_linkable_repositories(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_read_token(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_fetch_read_token(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_read_write_token(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_fetch_read_write_token(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_repository(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_repository(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_connections(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_connections(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_repositories(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_repositories(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_connection(self, response): - logging.log(f"Received response: {response}") - return response - - transport = RepositoryManagerRestTransport(interceptor=MyCustomRepositoryManagerInterceptor()) - client = RepositoryManagerClient(transport=transport) - - - 
""" - def pre_batch_create_repositories(self, request: repositories.BatchCreateRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.BatchCreateRepositoriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for batch_create_repositories - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_batch_create_repositories(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for batch_create_repositories - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_create_connection(self, request: repositories.CreateConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.CreateConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_create_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_connection - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_create_repository(self, request: repositories.CreateRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.CreateRepositoryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_repository - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. 
- """ - return request, metadata - - def post_create_repository(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_repository - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_delete_connection(self, request: repositories.DeleteConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.DeleteConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_delete_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_connection - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_delete_repository(self, request: repositories.DeleteRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.DeleteRepositoryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_repository - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_delete_repository(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_repository - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. 
- """ - return response - def pre_fetch_git_refs(self, request: repositories.FetchGitRefsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchGitRefsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for fetch_git_refs - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_fetch_git_refs(self, response: repositories.FetchGitRefsResponse) -> repositories.FetchGitRefsResponse: - """Post-rpc interceptor for fetch_git_refs - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_fetch_linkable_repositories(self, request: repositories.FetchLinkableRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchLinkableRepositoriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for fetch_linkable_repositories - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_fetch_linkable_repositories(self, response: repositories.FetchLinkableRepositoriesResponse) -> repositories.FetchLinkableRepositoriesResponse: - """Post-rpc interceptor for fetch_linkable_repositories - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_fetch_read_token(self, request: repositories.FetchReadTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchReadTokenRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for fetch_read_token - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. 
- """ - return request, metadata - - def post_fetch_read_token(self, response: repositories.FetchReadTokenResponse) -> repositories.FetchReadTokenResponse: - """Post-rpc interceptor for fetch_read_token - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_fetch_read_write_token(self, request: repositories.FetchReadWriteTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.FetchReadWriteTokenRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for fetch_read_write_token - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_fetch_read_write_token(self, response: repositories.FetchReadWriteTokenResponse) -> repositories.FetchReadWriteTokenResponse: - """Post-rpc interceptor for fetch_read_write_token - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_get_connection(self, request: repositories.GetConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.GetConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_get_connection(self, response: repositories.Connection) -> repositories.Connection: - """Post-rpc interceptor for get_connection - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. 
- """ - return response - def pre_get_repository(self, request: repositories.GetRepositoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.GetRepositoryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_repository - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_get_repository(self, response: repositories.Repository) -> repositories.Repository: - """Post-rpc interceptor for get_repository - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_list_connections(self, request: repositories.ListConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.ListConnectionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_connections - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_list_connections(self, response: repositories.ListConnectionsResponse) -> repositories.ListConnectionsResponse: - """Post-rpc interceptor for list_connections - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_list_repositories(self, request: repositories.ListRepositoriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.ListRepositoriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_repositories - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. 
- """ - return request, metadata - - def post_list_repositories(self, response: repositories.ListRepositoriesResponse) -> repositories.ListRepositoriesResponse: - """Post-rpc interceptor for list_repositories - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_update_connection(self, request: repositories.UpdateConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[repositories.UpdateConnectionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_update_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_connection - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. 
- """ - return response - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. 
- """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the RepositoryManager server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the RepositoryManager server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class RepositoryManagerRestStub: - _session: AuthorizedSession - _host: str - _interceptor: RepositoryManagerRestInterceptor - - -class RepositoryManagerRestTransport(RepositoryManagerTransport): - """REST backend transport for RepositoryManager. - - Manages connections to source code repositories. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[RepositoryManagerRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or RepositoryManagerRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v2") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _BatchCreateRepositories(RepositoryManagerRestStub): - def __hash__(self): - return hash("BatchCreateRepositories") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.BatchCreateRepositoriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the batch create repositories method over HTTP. - - Args: - request (~.repositories.BatchCreateRepositoriesRequest): - The request object. Message for creating repositoritories - in batch. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories:batchCreate', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_batch_create_repositories(request, metadata) - pb_request = repositories.BatchCreateRepositoriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_batch_create_repositories(resp) - return resp - - class _CreateConnection(RepositoryManagerRestStub): - def __hash__(self): - return hash("CreateConnection") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "connectionId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.CreateConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create connection method over HTTP. - - Args: - request (~.repositories.CreateConnectionRequest): - The request object. Message for creating a Connection - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/connections', - 'body': 'connection', - }, - ] - request, metadata = self._interceptor.pre_create_connection(request, metadata) - pb_request = repositories.CreateConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_connection(resp) - return resp - - class _CreateRepository(RepositoryManagerRestStub): - def __hash__(self): - return hash("CreateRepository") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "repositoryId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.CreateRepositoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create repository method over HTTP. - - Args: - request (~.repositories.CreateRepositoryRequest): - The request object. Message for creating a Repository. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories', - 'body': 'repository', - }, - ] - request, metadata = self._interceptor.pre_create_repository(request, metadata) - pb_request = repositories.CreateRepositoryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_repository(resp) - return resp - - class _DeleteConnection(RepositoryManagerRestStub): - def __hash__(self): - return hash("DeleteConnection") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.DeleteConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete connection method over HTTP. - - Args: - request (~.repositories.DeleteConnectionRequest): - The request object. Message for deleting a Connection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/connections/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_connection(request, metadata) - pb_request = repositories.DeleteConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_connection(resp) - return resp - - class _DeleteRepository(RepositoryManagerRestStub): - def __hash__(self): - return hash("DeleteRepository") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.DeleteRepositoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete repository method over HTTP. - - Args: - request (~.repositories.DeleteRepositoryRequest): - The request object. Message for deleting a Repository. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/connections/*/repositories/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_repository(request, metadata) - pb_request = repositories.DeleteRepositoryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_repository(resp) - return resp - - class _FetchGitRefs(RepositoryManagerRestStub): - def __hash__(self): - return hash("FetchGitRefs") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.FetchGitRefsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.FetchGitRefsResponse: - r"""Call the fetch git refs method over HTTP. - - Args: - request (~.repositories.FetchGitRefsRequest): - The request object. Request for fetching git refs - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - ~.repositories.FetchGitRefsResponse: - Response for fetching git refs - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:fetchGitRefs', - }, - ] - request, metadata = self._interceptor.pre_fetch_git_refs(request, metadata) - pb_request = repositories.FetchGitRefsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.FetchGitRefsResponse() - pb_resp = repositories.FetchGitRefsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_git_refs(resp) - return resp - - class _FetchLinkableRepositories(RepositoryManagerRestStub): - def __hash__(self): - return hash("FetchLinkableRepositories") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.FetchLinkableRepositoriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.FetchLinkableRepositoriesResponse: - r"""Call the fetch linkable - repositories method over HTTP. - - Args: - request (~.repositories.FetchLinkableRepositoriesRequest): - The request object. Request message for - FetchLinkableRepositories. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.FetchLinkableRepositoriesResponse: - Response message for - FetchLinkableRepositories. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{connection=projects/*/locations/*/connections/*}:fetchLinkableRepositories', - }, - ] - request, metadata = self._interceptor.pre_fetch_linkable_repositories(request, metadata) - pb_request = repositories.FetchLinkableRepositoriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.FetchLinkableRepositoriesResponse() - pb_resp = repositories.FetchLinkableRepositoriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_linkable_repositories(resp) - return resp - - class _FetchReadToken(RepositoryManagerRestStub): - def __hash__(self): - return hash("FetchReadToken") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.FetchReadTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.FetchReadTokenResponse: - r"""Call the fetch read token method over HTTP. - - Args: - request (~.repositories.FetchReadTokenRequest): - The request object. Message for fetching SCM read token. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.FetchReadTokenResponse: - Message for responding to get read - token. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadToken', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_fetch_read_token(request, metadata) - pb_request = repositories.FetchReadTokenRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.FetchReadTokenResponse() - pb_resp = repositories.FetchReadTokenResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_read_token(resp) - return resp - - class _FetchReadWriteToken(RepositoryManagerRestStub): - def __hash__(self): - return hash("FetchReadWriteToken") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.FetchReadWriteTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.FetchReadWriteTokenResponse: - r"""Call the fetch read write token method over HTTP. - - Args: - request (~.repositories.FetchReadWriteTokenRequest): - The request object. Message for fetching SCM read/write - token. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.FetchReadWriteTokenResponse: - Message for responding to get - read/write token. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadWriteToken', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_fetch_read_write_token(request, metadata) - pb_request = repositories.FetchReadWriteTokenRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.FetchReadWriteTokenResponse() - pb_resp = repositories.FetchReadWriteTokenResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_read_write_token(resp) - return resp - - class _GetConnection(RepositoryManagerRestStub): - def __hash__(self): - return hash("GetConnection") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.GetConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.Connection: - r"""Call the get connection method over HTTP. - - Args: - request (~.repositories.GetConnectionRequest): - The request object. Message for getting the details of a - Connection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.Connection: - A connection to a SCM like GitHub, - GitHub Enterprise, Bitbucket Server or - GitLab. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/connections/*}', - }, - ] - request, metadata = self._interceptor.pre_get_connection(request, metadata) - pb_request = repositories.GetConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.Connection() - pb_resp = repositories.Connection.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_connection(resp) - return resp - - class _GetRepository(RepositoryManagerRestStub): - def __hash__(self): - return hash("GetRepository") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.GetRepositoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.Repository: - r"""Call the get repository method over HTTP. - - Args: - request (~.repositories.GetRepositoryRequest): - The request object. Message for getting the details of a - Repository. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.Repository: - A repository associated to a parent - connection. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/connections/*/repositories/*}', - }, - ] - request, metadata = self._interceptor.pre_get_repository(request, metadata) - pb_request = repositories.GetRepositoryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.Repository() - pb_resp = repositories.Repository.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_repository(resp) - return resp - - class _ListConnections(RepositoryManagerRestStub): - def __hash__(self): - return hash("ListConnections") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.ListConnectionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.ListConnectionsResponse: - r"""Call the list connections method over HTTP. - - Args: - request (~.repositories.ListConnectionsRequest): - The request object. Message for requesting list of - Connections. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.ListConnectionsResponse: - Message for response to listing - Connections. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/connections', - }, - ] - request, metadata = self._interceptor.pre_list_connections(request, metadata) - pb_request = repositories.ListConnectionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.ListConnectionsResponse() - pb_resp = repositories.ListConnectionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_connections(resp) - return resp - - class _ListRepositories(RepositoryManagerRestStub): - def __hash__(self): - return hash("ListRepositories") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.ListRepositoriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> repositories.ListRepositoriesResponse: - r"""Call the list repositories method over HTTP. - - Args: - request (~.repositories.ListRepositoriesRequest): - The request object. Message for requesting list of - Repositories. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.repositories.ListRepositoriesResponse: - Message for response to listing - Repositories. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*/connections/*}/repositories', - }, - ] - request, metadata = self._interceptor.pre_list_repositories(request, metadata) - pb_request = repositories.ListRepositoriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = repositories.ListRepositoriesResponse() - pb_resp = repositories.ListRepositoriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_repositories(resp) - return resp - - class _UpdateConnection(RepositoryManagerRestStub): - def __hash__(self): - return hash("UpdateConnection") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: repositories.UpdateConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the update connection method over HTTP. - - Args: - request (~.repositories.UpdateConnectionRequest): - The request object. Message for updating a Connection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{connection.name=projects/*/locations/*/connections/*}', - 'body': 'connection', - }, - ] - request, metadata = self._interceptor.pre_update_connection(request, metadata) - pb_request = repositories.UpdateConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_connection(resp) - return resp - - @property - def batch_create_repositories(self) -> Callable[ - [repositories.BatchCreateRepositoriesRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._BatchCreateRepositories(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_connection(self) -> Callable[ - [repositories.CreateConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_repository(self) -> Callable[ - [repositories.CreateRepositoryRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateRepository(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_connection(self) -> Callable[ - [repositories.DeleteConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_repository(self) -> Callable[ - [repositories.DeleteRepositoryRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteRepository(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_git_refs(self) -> Callable[ - [repositories.FetchGitRefsRequest], - repositories.FetchGitRefsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._FetchGitRefs(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_linkable_repositories(self) -> Callable[ - [repositories.FetchLinkableRepositoriesRequest], - repositories.FetchLinkableRepositoriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FetchLinkableRepositories(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_read_token(self) -> Callable[ - [repositories.FetchReadTokenRequest], - repositories.FetchReadTokenResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FetchReadToken(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_read_write_token(self) -> Callable[ - [repositories.FetchReadWriteTokenRequest], - repositories.FetchReadWriteTokenResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FetchReadWriteToken(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_connection(self) -> Callable[ - [repositories.GetConnectionRequest], - repositories.Connection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_repository(self) -> Callable[ - [repositories.GetRepositoryRequest], - repositories.Repository]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetRepository(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_connections(self) -> Callable[ - [repositories.ListConnectionsRequest], - repositories.ListConnectionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_repositories(self) -> Callable[ - [repositories.ListRepositoriesRequest], - repositories.ListRepositoriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListRepositories(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_connection(self) -> Callable[ - [repositories.UpdateConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(RepositoryManagerRestStub): - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:getIamPolicy', - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(RepositoryManagerRestStub): - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:setIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(RepositoryManagerRestStub): - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{resource=projects/*/locations/*/connections/*}:testIamPermissions', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(RepositoryManagerRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(RepositoryManagerRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'RepositoryManagerRestTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py deleted file mode 100644 index 1df6a863..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/__init__.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .cloudbuild import ( - OperationMetadata, - RunWorkflowCustomOperationMetadata, -) -from .repositories import ( - BatchCreateRepositoriesRequest, - BatchCreateRepositoriesResponse, - Connection, - CreateConnectionRequest, - CreateRepositoryRequest, - DeleteConnectionRequest, - DeleteRepositoryRequest, - FetchGitRefsRequest, - FetchGitRefsResponse, - FetchLinkableRepositoriesRequest, - FetchLinkableRepositoriesResponse, - FetchReadTokenRequest, - FetchReadTokenResponse, - FetchReadWriteTokenRequest, - FetchReadWriteTokenResponse, - GetConnectionRequest, - GetRepositoryRequest, - GitHubConfig, - GitHubEnterpriseConfig, - GitLabConfig, - InstallationState, - ListConnectionsRequest, - ListConnectionsResponse, - ListRepositoriesRequest, - ListRepositoriesResponse, - OAuthCredential, - ProcessWebhookRequest, - Repository, - ServiceDirectoryConfig, - UpdateConnectionRequest, - UserCredential, -) - -__all__ = ( - 'OperationMetadata', - 'RunWorkflowCustomOperationMetadata', - 'BatchCreateRepositoriesRequest', - 'BatchCreateRepositoriesResponse', - 'Connection', - 'CreateConnectionRequest', - 'CreateRepositoryRequest', - 'DeleteConnectionRequest', - 'DeleteRepositoryRequest', - 'FetchGitRefsRequest', - 'FetchGitRefsResponse', - 'FetchLinkableRepositoriesRequest', - 'FetchLinkableRepositoriesResponse', - 'FetchReadTokenRequest', - 'FetchReadTokenResponse', - 'FetchReadWriteTokenRequest', - 'FetchReadWriteTokenResponse', - 'GetConnectionRequest', - 'GetRepositoryRequest', - 'GitHubConfig', - 'GitHubEnterpriseConfig', - 'GitLabConfig', - 'InstallationState', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'ListRepositoriesRequest', - 'ListRepositoriesResponse', - 'OAuthCredential', - 'ProcessWebhookRequest', - 'Repository', - 'ServiceDirectoryConfig', - 'UpdateConnectionRequest', - 'UserCredential', -) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py 
b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py deleted file mode 100644 index a016f0af..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/cloudbuild.py +++ /dev/null @@ -1,159 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.devtools.cloudbuild.v2', - manifest={ - 'OperationMetadata', - 'RunWorkflowCustomOperationMetadata', - }, -) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. 
Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class RunWorkflowCustomOperationMetadata(proto.Message): - r"""Represents the custom metadata of the RunWorkflow - long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - verb (str): - Output only. Name of the verb executed by the - operation. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - target (str): - Output only. Server-defined resource path for - the target of the operation. - pipeline_run_id (str): - Output only. ID of the pipeline run created - by RunWorkflow. 
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - verb: str = proto.Field( - proto.STRING, - number=3, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=4, - ) - api_version: str = proto.Field( - proto.STRING, - number=5, - ) - target: str = proto.Field( - proto.STRING, - number=6, - ) - pipeline_run_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py b/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py deleted file mode 100644 index 6d5e147a..00000000 --- a/owl-bot-staging/v2/google/cloud/devtools/cloudbuild_v2/types/repositories.py +++ /dev/null @@ -1,1104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.api import httpbody_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.devtools.cloudbuild.v2', - manifest={ - 'Connection', - 'InstallationState', - 'FetchLinkableRepositoriesRequest', - 'FetchLinkableRepositoriesResponse', - 'GitHubConfig', - 'GitHubEnterpriseConfig', - 'GitLabConfig', - 'ServiceDirectoryConfig', - 'Repository', - 'OAuthCredential', - 'UserCredential', - 'CreateConnectionRequest', - 'GetConnectionRequest', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'UpdateConnectionRequest', - 'DeleteConnectionRequest', - 'CreateRepositoryRequest', - 'BatchCreateRepositoriesRequest', - 'BatchCreateRepositoriesResponse', - 'GetRepositoryRequest', - 'ListRepositoriesRequest', - 'ListRepositoriesResponse', - 'DeleteRepositoryRequest', - 'FetchReadWriteTokenRequest', - 'FetchReadTokenRequest', - 'FetchReadTokenResponse', - 'FetchReadWriteTokenResponse', - 'ProcessWebhookRequest', - 'FetchGitRefsRequest', - 'FetchGitRefsResponse', - }, -) - - -class Connection(proto.Message): - r"""A connection to a SCM like GitHub, GitHub Enterprise, - Bitbucket Server or GitLab. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Immutable. The resource name of the connection, in the - format - ``projects/{project}/locations/{location}/connections/{connection_id}``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
Server assigned timestamp for - when the connection was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Server assigned timestamp for - when the connection was updated. - github_config (google.cloud.devtools.cloudbuild_v2.types.GitHubConfig): - Configuration for connections to github.com. - - This field is a member of `oneof`_ ``connection_config``. - github_enterprise_config (google.cloud.devtools.cloudbuild_v2.types.GitHubEnterpriseConfig): - Configuration for connections to an instance - of GitHub Enterprise. - - This field is a member of `oneof`_ ``connection_config``. - gitlab_config (google.cloud.devtools.cloudbuild_v2.types.GitLabConfig): - Configuration for connections to gitlab.com - or an instance of GitLab Enterprise. - - This field is a member of `oneof`_ ``connection_config``. - installation_state (google.cloud.devtools.cloudbuild_v2.types.InstallationState): - Output only. Installation state of the - Connection. - disabled (bool): - If disabled is set to true, functionality is - disabled for this connection. Repository based - API methods and webhooks processing for - repositories in this connection will be - disabled. - reconciling (bool): - Output only. Set to true when the connection - is being set up or updated in the background. - annotations (MutableMapping[str, str]): - Allows clients to store small amounts of - arbitrary data. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - github_config: 'GitHubConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='connection_config', - message='GitHubConfig', - ) - github_enterprise_config: 'GitHubEnterpriseConfig' = proto.Field( - proto.MESSAGE, - number=6, - oneof='connection_config', - message='GitHubEnterpriseConfig', - ) - gitlab_config: 'GitLabConfig' = proto.Field( - proto.MESSAGE, - number=7, - oneof='connection_config', - message='GitLabConfig', - ) - installation_state: 'InstallationState' = proto.Field( - proto.MESSAGE, - number=12, - message='InstallationState', - ) - disabled: bool = proto.Field( - proto.BOOL, - number=13, - ) - reconciling: bool = proto.Field( - proto.BOOL, - number=14, - ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=15, - ) - etag: str = proto.Field( - proto.STRING, - number=16, - ) - - -class InstallationState(proto.Message): - r"""Describes stage and necessary actions to be taken by the - user to complete the installation. Used for GitHub and GitHub - Enterprise based connections. - - Attributes: - stage (google.cloud.devtools.cloudbuild_v2.types.InstallationState.Stage): - Output only. Current step of the installation - process. - message (str): - Output only. Message of what the user should - do next to continue the installation. Empty - string if the installation is already complete. - action_uri (str): - Output only. Link to follow for next action. - Empty string if the installation is already - complete. - """ - class Stage(proto.Enum): - r"""Stage of the installation process. - - Values: - STAGE_UNSPECIFIED (0): - No stage specified. - PENDING_CREATE_APP (1): - Only for GitHub Enterprise. 
An App creation - has been requested. The user needs to confirm - the creation in their GitHub enterprise host. - PENDING_USER_OAUTH (2): - User needs to authorize the GitHub (or - Enterprise) App via OAuth. - PENDING_INSTALL_APP (3): - User needs to follow the link to install the - GitHub (or Enterprise) App. - COMPLETE (10): - Installation process has been completed. - """ - STAGE_UNSPECIFIED = 0 - PENDING_CREATE_APP = 1 - PENDING_USER_OAUTH = 2 - PENDING_INSTALL_APP = 3 - COMPLETE = 10 - - stage: Stage = proto.Field( - proto.ENUM, - number=1, - enum=Stage, - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - action_uri: str = proto.Field( - proto.STRING, - number=3, - ) - - -class FetchLinkableRepositoriesRequest(proto.Message): - r"""Request message for FetchLinkableRepositories. - - Attributes: - connection (str): - Required. The name of the Connection. Format: - ``projects/*/locations/*/connections/*``. - page_size (int): - Number of results to return in the list. - Default to 20. - page_token (str): - Page start. - """ - - connection: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class FetchLinkableRepositoriesResponse(proto.Message): - r"""Response message for FetchLinkableRepositories. - - Attributes: - repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): - repositories ready to be created. - next_page_token (str): - A token identifying a page of results the - server should return. - """ - - @property - def raw_page(self): - return self - - repositories: MutableSequence['Repository'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Repository', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GitHubConfig(proto.Message): - r"""Configuration for connections to github.com. 
- - Attributes: - authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.OAuthCredential): - OAuth credential of the account that - authorized the Cloud Build GitHub App. It is - recommended to use a robot account instead of a - human user account. The OAuth token must be tied - to the Cloud Build GitHub App. - app_installation_id (int): - GitHub App installation id. - """ - - authorizer_credential: 'OAuthCredential' = proto.Field( - proto.MESSAGE, - number=1, - message='OAuthCredential', - ) - app_installation_id: int = proto.Field( - proto.INT64, - number=2, - ) - - -class GitHubEnterpriseConfig(proto.Message): - r"""Configuration for connections to an instance of GitHub - Enterprise. - - Attributes: - host_uri (str): - Required. The URI of the GitHub Enterprise - host this connection is for. - api_key (str): - Required. API Key used for authentication of - webhook events. - app_id (int): - Id of the GitHub App created from the - manifest. - app_slug (str): - The URL-friendly name of the GitHub App. - private_key_secret_version (str): - SecretManager resource containing the private key of the - GitHub App, formatted as - ``projects/*/secrets/*/versions/*``. - webhook_secret_secret_version (str): - SecretManager resource containing the webhook secret of the - GitHub App, formatted as - ``projects/*/secrets/*/versions/*``. - app_installation_id (int): - ID of the installation of the GitHub App. - service_directory_config (google.cloud.devtools.cloudbuild_v2.types.ServiceDirectoryConfig): - Configuration for using Service Directory to - privately connect to a GitHub Enterprise server. - This should only be set if the GitHub Enterprise - server is hosted on-premises and not reachable - by public internet. If this field is left empty, - calls to the GitHub Enterprise server will be - made over the public internet. - ssl_ca (str): - SSL certificate to use for requests to GitHub - Enterprise. - server_version (str): - Output only. 
GitHub Enterprise version installed at the - host_uri. - """ - - host_uri: str = proto.Field( - proto.STRING, - number=1, - ) - api_key: str = proto.Field( - proto.STRING, - number=12, - ) - app_id: int = proto.Field( - proto.INT64, - number=2, - ) - app_slug: str = proto.Field( - proto.STRING, - number=13, - ) - private_key_secret_version: str = proto.Field( - proto.STRING, - number=4, - ) - webhook_secret_secret_version: str = proto.Field( - proto.STRING, - number=5, - ) - app_installation_id: int = proto.Field( - proto.INT64, - number=9, - ) - service_directory_config: 'ServiceDirectoryConfig' = proto.Field( - proto.MESSAGE, - number=10, - message='ServiceDirectoryConfig', - ) - ssl_ca: str = proto.Field( - proto.STRING, - number=11, - ) - server_version: str = proto.Field( - proto.STRING, - number=14, - ) - - -class GitLabConfig(proto.Message): - r"""Configuration for connections to gitlab.com or an instance of - GitLab Enterprise. - - Attributes: - host_uri (str): - The URI of the GitLab Enterprise host this - connection is for. If not specified, the default - value is https://gitlab.com. - webhook_secret_secret_version (str): - Required. Immutable. SecretManager resource containing the - webhook secret of a GitLab Enterprise project, formatted as - ``projects/*/secrets/*/versions/*``. - read_authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.UserCredential): - Required. A GitLab personal access token with the minimum - ``read_api`` scope access. - authorizer_credential (google.cloud.devtools.cloudbuild_v2.types.UserCredential): - Required. A GitLab personal access token with the ``api`` - scope access. - service_directory_config (google.cloud.devtools.cloudbuild_v2.types.ServiceDirectoryConfig): - Configuration for using Service Directory to - privately connect to a GitLab Enterprise server. - This should only be set if the GitLab Enterprise - server is hosted on-premises and not reachable - by public internet. 
If this field is left empty, - calls to the GitLab Enterprise server will be - made over the public internet. - ssl_ca (str): - SSL certificate to use for requests to GitLab - Enterprise. - server_version (str): - Output only. Version of the GitLab Enterprise server running - on the ``host_uri``. - """ - - host_uri: str = proto.Field( - proto.STRING, - number=1, - ) - webhook_secret_secret_version: str = proto.Field( - proto.STRING, - number=2, - ) - read_authorizer_credential: 'UserCredential' = proto.Field( - proto.MESSAGE, - number=3, - message='UserCredential', - ) - authorizer_credential: 'UserCredential' = proto.Field( - proto.MESSAGE, - number=4, - message='UserCredential', - ) - service_directory_config: 'ServiceDirectoryConfig' = proto.Field( - proto.MESSAGE, - number=5, - message='ServiceDirectoryConfig', - ) - ssl_ca: str = proto.Field( - proto.STRING, - number=6, - ) - server_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ServiceDirectoryConfig(proto.Message): - r"""ServiceDirectoryConfig represents Service Directory - configuration for a connection. - - Attributes: - service (str): - Required. The Service Directory service name. - Format: - projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}. - """ - - service: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Repository(proto.Message): - r"""A repository associated to a parent connection. - - Attributes: - name (str): - Immutable. Resource name of the repository, in the format - ``projects/*/locations/*/connections/*/repositories/*``. - remote_uri (str): - Required. Git Clone HTTPS URI. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Server assigned timestamp for - when the connection was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Server assigned timestamp for - when the connection was updated. 
- annotations (MutableMapping[str, str]): - Allows clients to store small amounts of - arbitrary data. - etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. - webhook_id (str): - Output only. External ID of the webhook - created for the repository. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - remote_uri: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - etag: str = proto.Field( - proto.STRING, - number=7, - ) - webhook_id: str = proto.Field( - proto.STRING, - number=8, - ) - - -class OAuthCredential(proto.Message): - r"""Represents an OAuth token of the account that authorized the - Connection, and associated metadata. - - Attributes: - oauth_token_secret_version (str): - A SecretManager resource containing the OAuth token that - authorizes the Cloud Build connection. Format: - ``projects/*/secrets/*/versions/*``. - username (str): - Output only. The username associated to this - token. - """ - - oauth_token_secret_version: str = proto.Field( - proto.STRING, - number=1, - ) - username: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UserCredential(proto.Message): - r"""Represents a personal access token that authorized the - Connection, and associated metadata. - - Attributes: - user_token_secret_version (str): - Required. A SecretManager resource containing the user token - that authorizes the Cloud Build connection. Format: - ``projects/*/secrets/*/versions/*``. - username (str): - Output only. 
The username associated to this - token. - """ - - user_token_secret_version: str = proto.Field( - proto.STRING, - number=1, - ) - username: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateConnectionRequest(proto.Message): - r"""Message for creating a Connection - - Attributes: - parent (str): - Required. Project and location where the connection will be - created. Format: ``projects/*/locations/*``. - connection (google.cloud.devtools.cloudbuild_v2.types.Connection): - Required. The Connection to create. - connection_id (str): - Required. The ID to use for the Connection, which will - become the final component of the Connection's resource - name. Names must be unique per-project per-location. Allows - alphanumeric characters and any of -._~%!$&'()*+,;=@. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - connection: 'Connection' = proto.Field( - proto.MESSAGE, - number=2, - message='Connection', - ) - connection_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetConnectionRequest(proto.Message): - r"""Message for getting the details of a Connection. - - Attributes: - name (str): - Required. The name of the Connection to retrieve. Format: - ``projects/*/locations/*/connections/*``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListConnectionsRequest(proto.Message): - r"""Message for requesting list of Connections. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - Connections. Format: ``projects/*/locations/*``. - page_size (int): - Number of results to return in the list. - page_token (str): - Page start. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListConnectionsResponse(proto.Message): - r"""Message for response to listing Connections. 
- - Attributes: - connections (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Connection]): - The list of Connections. - next_page_token (str): - A token identifying a page of results the - server should return. - """ - - @property - def raw_page(self): - return self - - connections: MutableSequence['Connection'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Connection', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateConnectionRequest(proto.Message): - r"""Message for updating a Connection. - - Attributes: - connection (google.cloud.devtools.cloudbuild_v2.types.Connection): - Required. The Connection to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be updated. - allow_missing (bool): - If set to true, and the connection is not found a new - connection will be created. In this situation - ``update_mask`` is ignored. The creation will succeed only - if the input connection has all the necessary information - (e.g a github_config with both user_oauth_token and - installation_id properties). - etag (str): - The current etag of the connection. - If an etag is provided and does not match the - current etag of the connection, update will be - blocked and an ABORTED error will be returned. - """ - - connection: 'Connection' = proto.Field( - proto.MESSAGE, - number=1, - message='Connection', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=3, - ) - etag: str = proto.Field( - proto.STRING, - number=4, - ) - - -class DeleteConnectionRequest(proto.Message): - r"""Message for deleting a Connection. - - Attributes: - name (str): - Required. The name of the Connection to delete. Format: - ``projects/*/locations/*/connections/*``. - etag (str): - The current etag of the connection. 
- If an etag is provided and does not match the - current etag of the connection, deletion will be - blocked and an ABORTED error will be returned. - validate_only (bool): - If set, validate the request, but do not - actually post it. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class CreateRepositoryRequest(proto.Message): - r"""Message for creating a Repository. - - Attributes: - parent (str): - Required. The connection to contain the - repository. If the request is part of a - BatchCreateRepositoriesRequest, this field - should be empty or match the parent specified - there. - repository (google.cloud.devtools.cloudbuild_v2.types.Repository): - Required. The repository to create. - repository_id (str): - Required. The ID to use for the repository, which will - become the final component of the repository's resource - name. This ID should be unique in the connection. Allows - alphanumeric characters and any of -._~%!$&'()*+,;=@. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - repository: 'Repository' = proto.Field( - proto.MESSAGE, - number=2, - message='Repository', - ) - repository_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BatchCreateRepositoriesRequest(proto.Message): - r"""Message for creating repositoritories in batch. - - Attributes: - parent (str): - Required. The connection to contain all the repositories - being created. Format: - projects/\ */locations/*/connections/\* The parent field in - the CreateRepositoryRequest messages must either be empty or - match this field. - requests (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]): - Required. The request messages specifying the - repositories to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - requests: MutableSequence['CreateRepositoryRequest'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='CreateRepositoryRequest', - ) - - -class BatchCreateRepositoriesResponse(proto.Message): - r"""Message for response of creating repositories in batch. - - Attributes: - repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): - Repository resources created. - """ - - repositories: MutableSequence['Repository'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Repository', - ) - - -class GetRepositoryRequest(proto.Message): - r"""Message for getting the details of a Repository. - - Attributes: - name (str): - Required. The name of the Repository to retrieve. Format: - ``projects/*/locations/*/connections/*/repositories/*``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListRepositoriesRequest(proto.Message): - r"""Message for requesting list of Repositories. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - Repositories. Format: - ``projects/*/locations/*/connections/*``. - page_size (int): - Number of results to return in the list. - page_token (str): - Page start. - filter (str): - A filter expression that filters resources listed in the - response. Expressions must follow API improvement proposal - `AIP-160 `__. e.g. - ``remote_uri:"https://github.com*"``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListRepositoriesResponse(proto.Message): - r"""Message for response to listing Repositories. - - Attributes: - repositories (MutableSequence[google.cloud.devtools.cloudbuild_v2.types.Repository]): - The list of Repositories. 
- next_page_token (str): - A token identifying a page of results the - server should return. - """ - - @property - def raw_page(self): - return self - - repositories: MutableSequence['Repository'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Repository', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteRepositoryRequest(proto.Message): - r"""Message for deleting a Repository. - - Attributes: - name (str): - Required. The name of the Repository to delete. Format: - ``projects/*/locations/*/connections/*/repositories/*``. - etag (str): - The current etag of the repository. - If an etag is provided and does not match the - current etag of the repository, deletion will be - blocked and an ABORTED error will be returned. - validate_only (bool): - If set, validate the request, but do not - actually post it. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class FetchReadWriteTokenRequest(proto.Message): - r"""Message for fetching SCM read/write token. - - Attributes: - repository (str): - Required. The resource name of the repository in the format - ``projects/*/locations/*/connections/*/repositories/*``. - """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FetchReadTokenRequest(proto.Message): - r"""Message for fetching SCM read token. - - Attributes: - repository (str): - Required. The resource name of the repository in the format - ``projects/*/locations/*/connections/*/repositories/*``. - """ - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FetchReadTokenResponse(proto.Message): - r"""Message for responding to get read token. - - Attributes: - token (str): - The token content. - expiration_time (google.protobuf.timestamp_pb2.Timestamp): - Expiration timestamp. Can be empty if unknown - or non-expiring. 
- """ - - token: str = proto.Field( - proto.STRING, - number=1, - ) - expiration_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class FetchReadWriteTokenResponse(proto.Message): - r"""Message for responding to get read/write token. - - Attributes: - token (str): - The token content. - expiration_time (google.protobuf.timestamp_pb2.Timestamp): - Expiration timestamp. Can be empty if unknown - or non-expiring. - """ - - token: str = proto.Field( - proto.STRING, - number=1, - ) - expiration_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class ProcessWebhookRequest(proto.Message): - r"""RPC request object accepted by the ProcessWebhook RPC method. - - Attributes: - parent (str): - Required. Project and location where the webhook will be - received. Format: ``projects/*/locations/*``. - body (google.api.httpbody_pb2.HttpBody): - HTTP request body. - webhook_key (str): - Arbitrary additional key to find the maching - repository for a webhook event if needed. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - body: httpbody_pb2.HttpBody = proto.Field( - proto.MESSAGE, - number=2, - message=httpbody_pb2.HttpBody, - ) - webhook_key: str = proto.Field( - proto.STRING, - number=3, - ) - - -class FetchGitRefsRequest(proto.Message): - r"""Request for fetching git refs - - Attributes: - repository (str): - Required. The resource name of the repository in the format - ``projects/*/locations/*/connections/*/repositories/*``. - ref_type (google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest.RefType): - Type of refs to fetch - """ - class RefType(proto.Enum): - r"""Type of refs - - Values: - REF_TYPE_UNSPECIFIED (0): - No type specified. - TAG (1): - To fetch tags. - BRANCH (2): - To fetch branches. 
- """ - REF_TYPE_UNSPECIFIED = 0 - TAG = 1 - BRANCH = 2 - - repository: str = proto.Field( - proto.STRING, - number=1, - ) - ref_type: RefType = proto.Field( - proto.ENUM, - number=2, - enum=RefType, - ) - - -class FetchGitRefsResponse(proto.Message): - r"""Response for fetching git refs - - Attributes: - ref_names (MutableSequence[str]): - Name of the refs fetched. - """ - - ref_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py deleted file mode 100644 index 89095013..00000000 --- a/owl-bot-staging/v2/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/devtools/cloudbuild_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py deleted file mode 100644 index fa9a6929..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCreateRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_batch_create_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - requests = cloudbuild_v2.CreateRepositoryRequest() - requests.parent = "parent_value" - requests.repository.remote_uri = "remote_uri_value" - requests.repository_id = "repository_id_value" - - request = cloudbuild_v2.BatchCreateRepositoriesRequest( - parent="parent_value", - requests=requests, - ) - - # Make the request - operation = client.batch_create_repositories(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py deleted file mode 100644 index cad8baae..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 
(the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCreateRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_batch_create_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - requests = cloudbuild_v2.CreateRepositoryRequest() - requests.parent = "parent_value" - requests.repository.remote_uri = "remote_uri_value" - requests.repository_id = "repository_id_value" - - request = cloudbuild_v2.BatchCreateRepositoriesRequest( - parent="parent_value", - requests=requests, - ) - - # Make the request - operation = client.batch_create_repositories(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py deleted file mode 100644 index 066f3245..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_CreateConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_create_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.CreateConnectionRequest( - parent="parent_value", - connection_id="connection_id_value", - ) - - # Make the request - operation = client.create_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_CreateConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py deleted file mode 100644 index d393a554..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_connection_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 
-*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_create_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.CreateConnectionRequest( - parent="parent_value", - connection_id="connection_id_value", - ) - - # Make the request - operation = client.create_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py deleted file mode 100644 index 52aaa857..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_CreateRepository_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_create_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - repository = cloudbuild_v2.Repository() - repository.remote_uri = "remote_uri_value" - - request = cloudbuild_v2.CreateRepositoryRequest( - parent="parent_value", - repository=repository, - repository_id="repository_id_value", - ) - - # Make the request - operation = client.create_repository(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_CreateRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py deleted file mode 100644 index eb9a5e29..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_create_repository_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in 
compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_create_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - repository = cloudbuild_v2.Repository() - repository.remote_uri = "remote_uri_value" - - request = cloudbuild_v2.CreateRepositoryRequest( - parent="parent_value", - repository=repository, - repository_id="repository_id_value", - ) - - # Make the request - operation = client.create_repository(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py deleted file mode 100644 index ef37e513..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_delete_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py deleted file mode 100644 index a18ff650..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_connection_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_delete_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py deleted file mode 100644 index 58a5dac2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_delete_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteRepositoryRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_repository(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py deleted file mode 100644 index f141cb54..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_delete_repository_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_delete_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.DeleteRepositoryRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_repository(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py deleted file mode 100644 index 2c639684..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchGitRefs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_fetch_git_refs(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchGitRefsRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_git_refs(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py deleted file mode 100644 index fde064f0..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchGitRefs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_fetch_git_refs(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchGitRefsRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_git_refs(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py deleted file mode 100644 index c6c744fc..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchLinkableRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_fetch_linkable_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchLinkableRepositoriesRequest( - connection="connection_value", - ) - - # Make the request - page_result = client.fetch_linkable_repositories(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py deleted file mode 100644 index 9d422598..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchLinkableRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_fetch_linkable_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchLinkableRepositoriesRequest( - connection="connection_value", - ) - - # Make the request - page_result = client.fetch_linkable_repositories(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py deleted file mode 100644 index b110edb9..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchReadToken -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_fetch_read_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadTokenRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_read_token(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py deleted file mode 100644 index 08680d32..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchReadToken -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_fetch_read_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadTokenRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_read_token(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py deleted file mode 100644 index f2fab11e..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchReadWriteToken -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_fetch_read_write_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadWriteTokenRequest( - repository="repository_value", - ) - - # Make the request - response = await client.fetch_read_write_token(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py deleted file mode 100644 index 64062425..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FetchReadWriteToken -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_fetch_read_write_token(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.FetchReadWriteTokenRequest( - repository="repository_value", - ) - - # Make the request - response = client.fetch_read_write_token(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py deleted file mode 100644 index cbce2c58..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_GetConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_get_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_GetConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py deleted file mode 100644 index 7da0f760..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_connection_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_GetConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_get_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_GetConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py deleted file mode 100644 index 077cd120..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_GetRepository_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_get_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetRepositoryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_repository(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_GetRepository_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py deleted file mode 100644 index 4f0bbd72..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_get_repository_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetRepository -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_GetRepository_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_get_repository(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.GetRepositoryRequest( - name="name_value", - ) - - # Make the request - response = client.get_repository(request=request) - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_GetRepository_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py deleted file mode 100644 index 78e39000..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_ListConnections_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_list_connections(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_ListConnections_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py deleted file mode 100644 index b0b6783d..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_connections_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_ListConnections_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_list_connections(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_ListConnections_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py deleted file mode 100644 index 6140bd1a..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_ListRepositories_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_list_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListRepositoriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_repositories(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_ListRepositories_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py deleted file mode 100644 index b133c8eb..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_list_repositories_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListRepositories -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_list_repositories(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.ListRepositoriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_repositories(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py deleted file mode 100644 index 792d9cd7..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -async def sample_update_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerAsyncClient() - - # Initialize request argument(s) - request = cloudbuild_v2.UpdateConnectionRequest( - ) - - # Make the request - operation = client.update_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async] diff --git a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py b/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py deleted file mode 100644 index f1583940..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/cloudbuild_v2_generated_repository_manager_update_connection_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-build - - -# [START cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.devtools import cloudbuild_v2 - - -def sample_update_connection(): - # Create a client - client = cloudbuild_v2.RepositoryManagerClient() - - # Initialize request argument(s) - request = cloudbuild_v2.UpdateConnectionRequest( - ) - - # Make the request - operation = client.update_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync] diff --git a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json deleted file mode 100644 index 818d3fc2..00000000 --- a/owl-bot-staging/v2/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ /dev/null @@ -1,2309 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.devtools.cloudbuild.v2", - "version": "v2" - } - ], - "language": "PYTHON", - "name": "google-cloud-build", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.batch_create_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.BatchCreateRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "BatchCreateRepositories" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "requests", - "type": "MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_create_repositories" - }, - "description": "Sample for BatchCreateRepositories", - "file": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.batch_create_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.BatchCreateRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "BatchCreateRepositories" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.devtools.cloudbuild_v2.types.BatchCreateRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "requests", - "type": "MutableSequence[google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_create_repositories" - }, - "description": "Sample for BatchCreateRepositories", - "file": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_BatchCreateRepositories_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_batch_create_repositories_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.create_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "CreateConnection" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" - }, - { - "name": "connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_connection" - }, - "description": "Sample for CreateConnection", - "file": "cloudbuild_v2_generated_repository_manager_create_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateConnection_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_create_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.create_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "CreateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.CreateConnectionRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" - }, - { - "name": "connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_connection" - }, - "description": "Sample for CreateConnection", - "file": "cloudbuild_v2_generated_repository_manager_create_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateConnection_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_create_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.create_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "CreateRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "repository", - "type": 
"google.cloud.devtools.cloudbuild_v2.types.Repository" - }, - { - "name": "repository_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_repository" - }, - "description": "Sample for CreateRepository", - "file": "cloudbuild_v2_generated_repository_manager_create_repository_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateRepository_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_create_repository_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.create_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.CreateRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "CreateRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.CreateRepositoryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "repository", - "type": "google.cloud.devtools.cloudbuild_v2.types.Repository" - }, - { - "name": "repository_id", 
- "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_repository" - }, - "description": "Sample for CreateRepository", - "file": "cloudbuild_v2_generated_repository_manager_create_repository_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_CreateRepository_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_create_repository_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.delete_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "DeleteConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.api_core.operation_async.AsyncOperation", - "shortName": "delete_connection" - }, - "description": "Sample for DeleteConnection", - "file": "cloudbuild_v2_generated_repository_manager_delete_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteConnection_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_delete_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.delete_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "DeleteConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_connection" - }, - "description": "Sample for DeleteConnection", - "file": "cloudbuild_v2_generated_repository_manager_delete_connection_sync.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteConnection_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_delete_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.delete_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "DeleteRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_repository" - }, - "description": "Sample for DeleteRepository", - "file": "cloudbuild_v2_generated_repository_manager_delete_repository_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteRepository_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, 
- "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_delete_repository_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.delete_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.DeleteRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "DeleteRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.DeleteRepositoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_repository" - }, - "description": "Sample for DeleteRepository", - "file": "cloudbuild_v2_generated_repository_manager_delete_repository_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_DeleteRepository_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - 
"start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_delete_repository_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_git_refs", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchGitRefs", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchGitRefs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse", - "shortName": "fetch_git_refs" - }, - "description": "Sample for FetchGitRefs", - "file": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_git_refs", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchGitRefs", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchGitRefs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchGitRefsResponse", - "shortName": "fetch_git_refs" - }, - "description": "Sample for FetchGitRefs", - "file": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchGitRefs_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_git_refs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_linkable_repositories", - 
"method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchLinkableRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchLinkableRepositories" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesAsyncPager", - "shortName": "fetch_linkable_repositories" - }, - "description": "Sample for FetchLinkableRepositories", - "file": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_linkable_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchLinkableRepositories", - "service": { - "fullName": 
"google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchLinkableRepositories" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchLinkableRepositoriesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.FetchLinkableRepositoriesPager", - "shortName": "fetch_linkable_repositories" - }, - "description": "Sample for FetchLinkableRepositories", - "file": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchLinkableRepositories_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_linkable_repositories_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_read_token", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadToken", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchReadToken" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse", - "shortName": "fetch_read_token" - }, - "description": "Sample for FetchReadToken", - "file": "cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadToken_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_read_token_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_read_token", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadToken", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchReadToken" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadTokenResponse", - "shortName": "fetch_read_token" - }, - "description": "Sample for FetchReadToken", - "file": "cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadToken_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_read_token_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.fetch_read_write_token", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadWriteToken", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchReadWriteToken" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse", - "shortName": "fetch_read_write_token" - }, - "description": "Sample for FetchReadWriteToken", - "file": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.fetch_read_write_token", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.FetchReadWriteToken", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "FetchReadWriteToken" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenRequest" - }, - { - "name": "repository", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.FetchReadWriteTokenResponse", - "shortName": "fetch_read_write_token" - }, - "description": "Sample for FetchReadWriteToken", - "file": 
"cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_FetchReadWriteToken_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_fetch_read_write_token_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.get_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "GetConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.Connection", - "shortName": "get_connection" - }, - "description": "Sample for GetConnection", - "file": "cloudbuild_v2_generated_repository_manager_get_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetConnection_async", - 
"segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_get_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.get_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "GetConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.GetConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.Connection", - "shortName": "get_connection" - }, - "description": "Sample for GetConnection", - "file": "cloudbuild_v2_generated_repository_manager_get_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetConnection_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, 
- { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_get_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.get_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "GetRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.Repository", - "shortName": "get_repository" - }, - "description": "Sample for GetRepository", - "file": "cloudbuild_v2_generated_repository_manager_get_repository_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetRepository_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_get_repository_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.get_repository", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.GetRepository", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "GetRepository" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.GetRepositoryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.types.Repository", - "shortName": "get_repository" - }, - "description": "Sample for GetRepository", - "file": "cloudbuild_v2_generated_repository_manager_get_repository_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_GetRepository_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_get_repository_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": 
"google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.list_connections", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListConnections", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "ListConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsAsyncPager", - "shortName": "list_connections" - }, - "description": "Sample for ListConnections", - "file": "cloudbuild_v2_generated_repository_manager_list_connections_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListConnections_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_list_connections_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.list_connections", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListConnections", - "service": { - 
"fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "ListConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.ListConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListConnectionsPager", - "shortName": "list_connections" - }, - "description": "Sample for ListConnections", - "file": "cloudbuild_v2_generated_repository_manager_list_connections_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListConnections_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_list_connections_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.list_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "ListRepositories" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesAsyncPager", - "shortName": "list_repositories" - }, - "description": "Sample for ListRepositories", - "file": "cloudbuild_v2_generated_repository_manager_list_repositories_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListRepositories_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_list_repositories_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.list_repositories", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.ListRepositories", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "ListRepositories" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.ListRepositoriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.devtools.cloudbuild_v2.services.repository_manager.pagers.ListRepositoriesPager", - "shortName": "list_repositories" - }, - "description": "Sample for ListRepositories", - "file": "cloudbuild_v2_generated_repository_manager_list_repositories_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_ListRepositories_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_list_repositories_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient", - "shortName": "RepositoryManagerAsyncClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerAsyncClient.update_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.UpdateConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "UpdateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest" - }, - { - "name": "connection", - "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_connection" - }, - "description": "Sample for UpdateConnection", - "file": "cloudbuild_v2_generated_repository_manager_update_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_UpdateConnection_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_update_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient", - "shortName": "RepositoryManagerClient" - }, - "fullName": "google.cloud.devtools.cloudbuild_v2.RepositoryManagerClient.update_connection", - "method": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager.UpdateConnection", - "service": { - "fullName": "google.devtools.cloudbuild.v2.RepositoryManager", - "shortName": "RepositoryManager" - }, - "shortName": "UpdateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.devtools.cloudbuild_v2.types.UpdateConnectionRequest" - }, - { - "name": "connection", - "type": "google.cloud.devtools.cloudbuild_v2.types.Connection" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_connection" - }, - "description": "Sample for UpdateConnection", - "file": "cloudbuild_v2_generated_repository_manager_update_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudbuild_v2_generated_RepositoryManager_UpdateConnection_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudbuild_v2_generated_repository_manager_update_connection_sync.py" - } - ] -} diff --git a/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py deleted file mode 100644 index 6df46861..00000000 --- a/owl-bot-staging/v2/scripts/fixup_cloudbuild_v2_keywords.py +++ /dev/null @@ -1,189 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class cloudbuildCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_create_repositories': ('parent', 'requests', ), - 'create_connection': ('parent', 'connection', 'connection_id', ), - 'create_repository': ('parent', 'repository', 'repository_id', ), - 'delete_connection': ('name', 'etag', 'validate_only', ), - 'delete_repository': ('name', 'etag', 'validate_only', ), - 'fetch_git_refs': ('repository', 'ref_type', ), - 'fetch_linkable_repositories': ('connection', 'page_size', 'page_token', ), - 'fetch_read_token': ('repository', ), - 'fetch_read_write_token': ('repository', ), - 'get_connection': ('name', ), - 'get_repository': ('name', ), - 'list_connections': ('parent', 'page_size', 'page_token', ), - 'list_repositories': ('parent', 'page_size', 'page_token', 'filter', ), - 'update_connection': ('connection', 'update_mask', 'allow_missing', 'etag', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. 
- args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=cloudbuildCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. 
- with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the cloudbuild client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py deleted file mode 100644 index 525db897..00000000 --- a/owl-bot-staging/v2/setup.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 
2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-build' - - -description = "Google Cloud Build API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/devtools/cloudbuild/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", -] -url = "https://github.com/googleapis/python-build" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud", "google.cloud.devtools"] - -setuptools.setup( - name=name, - version=version, - description=description, - 
long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v2/testing/constraints-3.10.txt b/owl-bot-staging/v2/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.11.txt b/owl-bot-staging/v2/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.12.txt b/owl-bot-staging/v2/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.7.txt b/owl-bot-staging/v2/testing/constraints-3.7.txt deleted file mode 100644 index 2beecf99..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 -grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/v2/testing/constraints-3.8.txt b/owl-bot-staging/v2/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/testing/constraints-3.9.txt b/owl-bot-staging/v2/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v2/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py deleted file mode 100644 index 786103fe..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py +++ /dev/null @@ -1,9596 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import 
operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import RepositoryManagerAsyncClient -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import RepositoryManagerClient -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import pagers -from google.cloud.devtools.cloudbuild_v2.services.repository_manager import transports -from google.cloud.devtools.cloudbuild_v2.types import cloudbuild -from google.cloud.devtools.cloudbuild_v2.types import repositories -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert RepositoryManagerClient._get_default_mtls_endpoint(None) is None - assert RepositoryManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert RepositoryManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert RepositoryManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert RepositoryManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert RepositoryManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (RepositoryManagerClient, "grpc"), - (RepositoryManagerAsyncClient, "grpc_asyncio"), - (RepositoryManagerClient, "rest"), -]) -def test_repository_manager_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudbuild.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.RepositoryManagerGrpcTransport, "grpc"), - 
(transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.RepositoryManagerRestTransport, "rest"), -]) -def test_repository_manager_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (RepositoryManagerClient, "grpc"), - (RepositoryManagerAsyncClient, "grpc_asyncio"), - (RepositoryManagerClient, "rest"), -]) -def test_repository_manager_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudbuild.googleapis.com' - ) - - -def test_repository_manager_client_get_transport_class(): - transport = RepositoryManagerClient.get_transport_class() - available_transports = [ - transports.RepositoryManagerGrpcTransport, - 
transports.RepositoryManagerRestTransport, - ] - assert transport in available_transports - - transport = RepositoryManagerClient.get_transport_class("grpc") - assert transport == transports.RepositoryManagerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), -]) -@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) -@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) -def test_repository_manager_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(RepositoryManagerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(RepositoryManagerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", "true"), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (RepositoryManagerClient, 
transports.RepositoryManagerGrpcTransport, "grpc", "false"), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", "true"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", "false"), -]) -@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) -@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_repository_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - RepositoryManagerClient, RepositoryManagerAsyncClient -]) -@mock.patch.object(RepositoryManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerClient)) -@mock.patch.object(RepositoryManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RepositoryManagerAsyncClient)) -def test_repository_manager_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc"), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio"), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest"), -]) -def test_repository_manager_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", grpc_helpers), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (RepositoryManagerClient, transports.RepositoryManagerRestTransport, "rest", None), -]) -def test_repository_manager_client_client_options_credentials_file(client_class, 
transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_repository_manager_client_client_options_from_dict(): - with mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = RepositoryManagerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport, "grpc", grpc_helpers), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_repository_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.CreateConnectionRequest, - dict, -]) -def test_create_connection(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - client.create_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateConnectionRequest() - -@pytest.mark.asyncio -async def test_create_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.CreateConnectionRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_connection_async_from_dict(): - await test_create_connection_async(request_type=dict) - - -def test_create_connection_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.CreateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_connection_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = repositories.CreateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_connection_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_connection( - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection - mock_val = repositories.Connection(name='name_value') - assert arg == mock_val - arg = args[0].connection_id - mock_val = 'connection_id_value' - assert arg == mock_val - - -def test_create_connection_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_connection( - repositories.CreateConnectionRequest(), - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - -@pytest.mark.asyncio -async def test_create_connection_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_connection( - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection - mock_val = repositories.Connection(name='name_value') - assert arg == mock_val - arg = args[0].connection_id - mock_val = 'connection_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_connection_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_connection( - repositories.CreateConnectionRequest(), - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.GetConnectionRequest, - dict, -]) -def test_get_connection(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.Connection( - name='name_value', - disabled=True, - reconciling=True, - etag='etag_value', - ) - response = client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetConnectionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Connection) - assert response.name == 'name_value' - assert response.disabled is True - assert response.reconciling is True - assert response.etag == 'etag_value' - - -def test_get_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - client.get_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetConnectionRequest() - -@pytest.mark.asyncio -async def test_get_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.GetConnectionRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection( - name='name_value', - disabled=True, - reconciling=True, - etag='etag_value', - )) - response = await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetConnectionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Connection) - assert response.name == 'name_value' - assert response.disabled is True - assert response.reconciling is True - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_connection_async_from_dict(): - await test_get_connection_async(request_type=dict) - - -def test_get_connection_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.GetConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = repositories.Connection() - client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_connection_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.GetConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection()) - await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_connection_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.Connection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_connection_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_connection( - repositories.GetConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_connection_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = repositories.Connection() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Connection()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_connection_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_connection( - repositories.GetConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.ListConnectionsRequest, - dict, -]) -def test_list_connections(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListConnectionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListConnectionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_connections_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - client.list_connections() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListConnectionsRequest() - -@pytest.mark.asyncio -async def test_list_connections_async(transport: str = 'grpc_asyncio', request_type=repositories.ListConnectionsRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListConnectionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_connections_async_from_dict(): - await test_list_connections_async(request_type=dict) - - -def test_list_connections_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.ListConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = repositories.ListConnectionsResponse() - client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_connections_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.ListConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse()) - await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_connections_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListConnectionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_connections_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_connections( - repositories.ListConnectionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_connections_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListConnectionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListConnectionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_connections_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_connections( - repositories.ListConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_connections_pager(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_connections(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Connection) - for i in results) -def test_list_connections_pages(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - RuntimeError, - ) - pages = list(client.list_connections(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_connections_async_pager(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_connections(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, repositories.Connection) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_connections_async_pages(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_connections(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - repositories.UpdateConnectionRequest, - dict, -]) -def test_update_connection(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.UpdateConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - client.update_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.UpdateConnectionRequest() - -@pytest.mark.asyncio -async def test_update_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.UpdateConnectionRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.UpdateConnectionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_connection_async_from_dict(): - await test_update_connection_async(request_type=dict) - - -def test_update_connection_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.UpdateConnectionRequest() - - request.connection.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_connection_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.UpdateConnectionRequest() - - request.connection.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection.name=name_value', - ) in kw['metadata'] - - -def test_update_connection_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_connection( - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].connection - mock_val = repositories.Connection(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_connection_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_connection( - repositories.UpdateConnectionRequest(), - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_connection_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_connection( - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].connection - mock_val = repositories.Connection(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_connection_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_connection( - repositories.UpdateConnectionRequest(), - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.DeleteConnectionRequest, - dict, -]) -def test_delete_connection(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - client.delete_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteConnectionRequest() - -@pytest.mark.asyncio -async def test_delete_connection_async(transport: str = 'grpc_asyncio', request_type=repositories.DeleteConnectionRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteConnectionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_connection_async_from_dict(): - await test_delete_connection_async(request_type=dict) - - -def test_delete_connection_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.DeleteConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_connection_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.DeleteConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_connection_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_connection_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_connection( - repositories.DeleteConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_connection_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_connection_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_connection( - repositories.DeleteConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.CreateRepositoryRequest, - dict, -]) -def test_create_repository(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateRepositoryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_repository_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - client.create_repository() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateRepositoryRequest() - -@pytest.mark.asyncio -async def test_create_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.CreateRepositoryRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.CreateRepositoryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_repository_async_from_dict(): - await test_create_repository_async(request_type=dict) - - -def test_create_repository_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = repositories.CreateRepositoryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_repository_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.CreateRepositoryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_repository_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_repository( - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].repository - mock_val = repositories.Repository(name='name_value') - assert arg == mock_val - arg = args[0].repository_id - mock_val = 'repository_id_value' - assert arg == mock_val - - -def test_create_repository_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_repository( - repositories.CreateRepositoryRequest(), - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - -@pytest.mark.asyncio -async def test_create_repository_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_repository), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_repository( - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].repository - mock_val = repositories.Repository(name='name_value') - assert arg == mock_val - arg = args[0].repository_id - mock_val = 'repository_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_repository_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_repository( - repositories.CreateRepositoryRequest(), - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.BatchCreateRepositoriesRequest, - dict, -]) -def test_batch_create_repositories(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.batch_create_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.BatchCreateRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_batch_create_repositories_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - client.batch_create_repositories() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.BatchCreateRepositoriesRequest() - -@pytest.mark.asyncio -async def test_batch_create_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.BatchCreateRepositoriesRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.batch_create_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.BatchCreateRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_batch_create_repositories_async_from_dict(): - await test_batch_create_repositories_async(request_type=dict) - - -def test_batch_create_repositories_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.BatchCreateRepositoriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.batch_create_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_create_repositories_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = repositories.BatchCreateRepositoriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.batch_create_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_batch_create_repositories_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.batch_create_repositories( - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].requests - mock_val = [repositories.CreateRepositoryRequest(parent='parent_value')] - assert arg == mock_val - - -def test_batch_create_repositories_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_create_repositories( - repositories.BatchCreateRepositoriesRequest(), - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - -@pytest.mark.asyncio -async def test_batch_create_repositories_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.batch_create_repositories( - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].requests - mock_val = [repositories.CreateRepositoryRequest(parent='parent_value')] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_batch_create_repositories_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_create_repositories( - repositories.BatchCreateRepositoriesRequest(), - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.GetRepositoryRequest, - dict, -]) -def test_get_repository(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.Repository( - name='name_value', - remote_uri='remote_uri_value', - etag='etag_value', - webhook_id='webhook_id_value', - ) - response = client.get_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetRepositoryRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Repository) - assert response.name == 'name_value' - assert response.remote_uri == 'remote_uri_value' - assert response.etag == 'etag_value' - assert response.webhook_id == 'webhook_id_value' - - -def test_get_repository_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - client.get_repository() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetRepositoryRequest() - -@pytest.mark.asyncio -async def test_get_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.GetRepositoryRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository( - name='name_value', - remote_uri='remote_uri_value', - etag='etag_value', - webhook_id='webhook_id_value', - )) - response = await client.get_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.GetRepositoryRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Repository) - assert response.name == 'name_value' - assert response.remote_uri == 'remote_uri_value' - assert response.etag == 'etag_value' - assert response.webhook_id == 'webhook_id_value' - - -@pytest.mark.asyncio -async def test_get_repository_async_from_dict(): - await test_get_repository_async(request_type=dict) - - -def test_get_repository_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.GetRepositoryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - call.return_value = repositories.Repository() - client.get_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_repository_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.GetRepositoryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository()) - await client.get_repository(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_repository_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.Repository() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_repository( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_repository_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_repository( - repositories.GetRepositoryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_repository_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_repository), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = repositories.Repository() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.Repository()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_repository( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_repository_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_repository( - repositories.GetRepositoryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.ListRepositoriesRequest, - dict, -]) -def test_list_repositories(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListRepositoriesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_repositories(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRepositoriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_repositories_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - client.list_repositories() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListRepositoriesRequest() - -@pytest.mark.asyncio -async def test_list_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.ListRepositoriesRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_repositories(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.ListRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRepositoriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_repositories_async_from_dict(): - await test_list_repositories_async(request_type=dict) - - -def test_list_repositories_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.ListRepositoriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - call.return_value = repositories.ListRepositoriesResponse() - client.list_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_repositories_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.ListRepositoriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse()) - await client.list_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_repositories_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListRepositoriesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_repositories( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_repositories_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_repositories( - repositories.ListRepositoriesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_repositories_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.ListRepositoriesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.ListRepositoriesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_repositories( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_repositories_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_repositories( - repositories.ListRepositoriesRequest(), - parent='parent_value', - ) - - -def test_list_repositories_pager(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_repositories(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Repository) - for i in results) -def test_list_repositories_pages(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - pages = list(client.list_repositories(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_repositories_async_pager(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_repositories(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, repositories.Repository) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_repositories_async_pages(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_repositories), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_repositories(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - repositories.DeleteRepositoryRequest, - dict, -]) -def test_delete_repository(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_repository(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteRepositoryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_repository_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - client.delete_repository() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteRepositoryRequest() - -@pytest.mark.asyncio -async def test_delete_repository_async(transport: str = 'grpc_asyncio', request_type=repositories.DeleteRepositoryRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.DeleteRepositoryRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_repository_async_from_dict(): - await test_delete_repository_async(request_type=dict) - - -def test_delete_repository_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.DeleteRepositoryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_repository(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_repository_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.DeleteRepositoryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_repository(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_repository_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_repository( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_repository_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_repository( - repositories.DeleteRepositoryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_repository_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_repository), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_repository( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_repository_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_repository( - repositories.DeleteRepositoryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchReadWriteTokenRequest, - dict, -]) -def test_fetch_read_write_token(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadWriteTokenResponse( - token='token_value', - ) - response = client.fetch_read_write_token(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadWriteTokenRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchReadWriteTokenResponse) - assert response.token == 'token_value' - - -def test_fetch_read_write_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - client.fetch_read_write_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadWriteTokenRequest() - -@pytest.mark.asyncio -async def test_fetch_read_write_token_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchReadWriteTokenRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse( - token='token_value', - )) - response = await client.fetch_read_write_token(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadWriteTokenRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchReadWriteTokenResponse) - assert response.token == 'token_value' - - -@pytest.mark.asyncio -async def test_fetch_read_write_token_async_from_dict(): - await test_fetch_read_write_token_async(request_type=dict) - - -def test_fetch_read_write_token_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchReadWriteTokenRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - call.return_value = repositories.FetchReadWriteTokenResponse() - client.fetch_read_write_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_read_write_token_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchReadWriteTokenRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse()) - await client.fetch_read_write_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -def test_fetch_read_write_token_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadWriteTokenResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.fetch_read_write_token( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - - -def test_fetch_read_write_token_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.fetch_read_write_token( - repositories.FetchReadWriteTokenRequest(), - repository='repository_value', - ) - -@pytest.mark.asyncio -async def test_fetch_read_write_token_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_write_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadWriteTokenResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadWriteTokenResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.fetch_read_write_token( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_fetch_read_write_token_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.fetch_read_write_token( - repositories.FetchReadWriteTokenRequest(), - repository='repository_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchReadTokenRequest, - dict, -]) -def test_fetch_read_token(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadTokenResponse( - token='token_value', - ) - response = client.fetch_read_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadTokenRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchReadTokenResponse) - assert response.token == 'token_value' - - -def test_fetch_read_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - client.fetch_read_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadTokenRequest() - -@pytest.mark.asyncio -async def test_fetch_read_token_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchReadTokenRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse( - token='token_value', - )) - response = await client.fetch_read_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchReadTokenRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchReadTokenResponse) - assert response.token == 'token_value' - - -@pytest.mark.asyncio -async def test_fetch_read_token_async_from_dict(): - await test_fetch_read_token_async(request_type=dict) - - -def test_fetch_read_token_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchReadTokenRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - call.return_value = repositories.FetchReadTokenResponse() - client.fetch_read_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_read_token_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchReadTokenRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse()) - await client.fetch_read_token(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -def test_fetch_read_token_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = repositories.FetchReadTokenResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.fetch_read_token( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - - -def test_fetch_read_token_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_read_token( - repositories.FetchReadTokenRequest(), - repository='repository_value', - ) - -@pytest.mark.asyncio -async def test_fetch_read_token_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_read_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchReadTokenResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchReadTokenResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.fetch_read_token( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_fetch_read_token_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.fetch_read_token( - repositories.FetchReadTokenRequest(), - repository='repository_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchLinkableRepositoriesRequest, - dict, -]) -def test_fetch_linkable_repositories(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchLinkableRepositoriesResponse( - next_page_token='next_page_token_value', - ) - response = client.fetch_linkable_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchLinkableRepositoriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.FetchLinkableRepositoriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_fetch_linkable_repositories_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - client.fetch_linkable_repositories() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchLinkableRepositoriesRequest() - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchLinkableRepositoriesRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchLinkableRepositoriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.fetch_linkable_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchLinkableRepositoriesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchLinkableRepositoriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_async_from_dict(): - await test_fetch_linkable_repositories_async(request_type=dict) - - -def test_fetch_linkable_repositories_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchLinkableRepositoriesRequest() - - request.connection = 'connection_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - call.return_value = repositories.FetchLinkableRepositoriesResponse() - client.fetch_linkable_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection=connection_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchLinkableRepositoriesRequest() - - request.connection = 'connection_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchLinkableRepositoriesResponse()) - await client.fetch_linkable_repositories(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'connection=connection_value', - ) in kw['metadata'] - - -def test_fetch_linkable_repositories_pager(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('connection', ''), - )), - ) - pager = client.fetch_linkable_repositories(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Repository) - for i in results) -def test_fetch_linkable_repositories_pages(transport_name: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - pages = list(client.fetch_linkable_repositories(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_async_pager(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - async_pager = await client.fetch_linkable_repositories(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, repositories.Repository) - for i in responses) - - -@pytest.mark.asyncio -async def test_fetch_linkable_repositories_async_pages(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_linkable_repositories), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.fetch_linkable_repositories(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - repositories.FetchGitRefsRequest, - dict, -]) -def test_fetch_git_refs(request_type, transport: str = 'grpc'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchGitRefsResponse( - ref_names=['ref_names_value'], - ) - response = client.fetch_git_refs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchGitRefsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.FetchGitRefsResponse) - assert response.ref_names == ['ref_names_value'] - - -def test_fetch_git_refs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - client.fetch_git_refs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchGitRefsRequest() - -@pytest.mark.asyncio -async def test_fetch_git_refs_async(transport: str = 'grpc_asyncio', request_type=repositories.FetchGitRefsRequest): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse( - ref_names=['ref_names_value'], - )) - response = await client.fetch_git_refs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == repositories.FetchGitRefsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.FetchGitRefsResponse) - assert response.ref_names == ['ref_names_value'] - - -@pytest.mark.asyncio -async def test_fetch_git_refs_async_from_dict(): - await test_fetch_git_refs_async(request_type=dict) - - -def test_fetch_git_refs_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchGitRefsRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - call.return_value = repositories.FetchGitRefsResponse() - client.fetch_git_refs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_fetch_git_refs_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = repositories.FetchGitRefsRequest() - - request.repository = 'repository_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse()) - await client.fetch_git_refs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'repository=repository_value', - ) in kw['metadata'] - - -def test_fetch_git_refs_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = repositories.FetchGitRefsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.fetch_git_refs( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - - -def test_fetch_git_refs_flattened_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_git_refs( - repositories.FetchGitRefsRequest(), - repository='repository_value', - ) - -@pytest.mark.asyncio -async def test_fetch_git_refs_flattened_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_git_refs), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = repositories.FetchGitRefsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(repositories.FetchGitRefsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.fetch_git_refs( - repository='repository_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].repository - mock_val = 'repository_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_fetch_git_refs_flattened_error_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.fetch_git_refs( - repositories.FetchGitRefsRequest(), - repository='repository_value', - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.CreateConnectionRequest, - dict, -]) -def test_create_connection_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 
'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_connection_rest_required_fields(request_type=repositories.CreateConnectionRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["connection_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "connectionId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "connectionId" in jsonified_request - assert jsonified_request["connectionId"] == request_init["connection_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["connectionId"] = 'connection_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("connection_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "connectionId" in jsonified_request - assert jsonified_request["connectionId"] == 'connection_id_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_connection(request) - - expected_params = [ - ( - "connectionId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_connection_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("connectionId", )) & set(("parent", "connection", "connectionId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_connection_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_create_connection") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_create_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.CreateConnectionRequest.pb(repositories.CreateConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.CreateConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.CreateConnectionRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 
'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_connection(request) - - -def test_create_connection_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) - - -def test_create_connection_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_connection( - repositories.CreateConnectionRequest(), - parent='parent_value', - connection=repositories.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - -def test_create_connection_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.GetConnectionRequest, - dict, -]) -def test_get_connection_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.Connection( - name='name_value', - disabled=True, - reconciling=True, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_connection(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.Connection) - assert response.name == 'name_value' - assert response.disabled is True - assert response.reconciling is True - assert response.etag == 'etag_value' - - -def test_get_connection_rest_required_fields(request_type=repositories.GetConnectionRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.Connection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_connection_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_connection_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_get_connection") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_get_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
repositories.GetConnectionRequest.pb(repositories.GetConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.Connection.to_json(repositories.Connection()) - - request = repositories.GetConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.Connection() - - client.get_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.GetConnectionRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_connection(request) - - -def test_get_connection_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = repositories.Connection() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_get_connection_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_connection( - repositories.GetConnectionRequest(), - name='name_value', - ) - - -def test_get_connection_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.ListConnectionsRequest, - dict, -]) -def test_list_connections_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.ListConnectionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_connections(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_connections_rest_required_fields(request_type=repositories.ListConnectionsRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.ListConnectionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_connections(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_connections_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_connections._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_connections_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_list_connections") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_list_connections") as pre: - pre.assert_not_called() - post.assert_not_called() - 
pb_message = repositories.ListConnectionsRequest.pb(repositories.ListConnectionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.ListConnectionsResponse.to_json(repositories.ListConnectionsResponse()) - - request = repositories.ListConnectionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.ListConnectionsResponse() - - client.list_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_connections_rest_bad_request(transport: str = 'rest', request_type=repositories.ListConnectionsRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_connections(request) - - -def test_list_connections_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = repositories.ListConnectionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_connections(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) - - -def test_list_connections_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_connections( - repositories.ListConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_connections_rest_pager(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - repositories.Connection(), - ], - next_page_token='abc', - ), - repositories.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - ], - next_page_token='ghi', - ), - repositories.ListConnectionsResponse( - connections=[ - repositories.Connection(), - repositories.Connection(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(repositories.ListConnectionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_connections(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Connection) - for i in results) - - pages = list(client.list_connections(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - repositories.UpdateConnectionRequest, - dict, -]) -def test_update_connection_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} - request_init["connection"] = {'name': 
'projects/sample1/locations/sample2/connections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_connection_rest_required_fields(request_type=repositories.UpdateConnectionRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "etag", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_connection_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "etag", "updateMask", )) & set(("connection", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_connection_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_update_connection") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_update_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.UpdateConnectionRequest.pb(repositories.UpdateConnectionRequest()) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.UpdateConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.UpdateConnectionRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} - request_init["connection"] = {'name': 'projects/sample1/locations/sample2/connections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'github_config': {'authorizer_credential': {'oauth_token_secret_version': 'oauth_token_secret_version_value', 'username': 'username_value'}, 'app_installation_id': 2014}, 'github_enterprise_config': {'host_uri': 'host_uri_value', 'api_key': 'api_key_value', 'app_id': 621, 'app_slug': 'app_slug_value', 'private_key_secret_version': 'private_key_secret_version_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'app_installation_id': 2014, 'service_directory_config': {'service': 'service_value'}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'gitlab_config': {'host_uri': 'host_uri_value', 'webhook_secret_secret_version': 'webhook_secret_secret_version_value', 'read_authorizer_credential': {'user_token_secret_version': 
'user_token_secret_version_value', 'username': 'username_value'}, 'authorizer_credential': {}, 'service_directory_config': {}, 'ssl_ca': 'ssl_ca_value', 'server_version': 'server_version_value'}, 'installation_state': {'stage': 1, 'message': 'message_value', 'action_uri': 'action_uri_value'}, 'disabled': True, 'reconciling': True, 'annotations': {}, 'etag': 'etag_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_connection(request) - - -def test_update_connection_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'connection': {'name': 'projects/sample1/locations/sample2/connections/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{connection.name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_update_connection_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_connection( - repositories.UpdateConnectionRequest(), - connection=repositories.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_connection_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.DeleteConnectionRequest, - dict, -]) -def test_delete_connection_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_delete_connection_rest_required_fields(request_type=repositories.DeleteConnectionRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_connection_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_connection_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_delete_connection") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_delete_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - 
pb_message = repositories.DeleteConnectionRequest.pb(repositories.DeleteConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.DeleteConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_connection_rest_bad_request(transport: str = 'rest', request_type=repositories.DeleteConnectionRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_connection(request) - - -def test_delete_connection_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_delete_connection_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_connection( - repositories.DeleteConnectionRequest(), - name='name_value', - ) - - -def test_delete_connection_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.CreateRepositoryRequest, - dict, -]) -def test_create_repository_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request_init["repository"] = {'name': 'name_value', 'remote_uri': 'remote_uri_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'annotations': {}, 'etag': 'etag_value', 'webhook_id': 'webhook_id_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_repository(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_repository_rest_required_fields(request_type=repositories.CreateRepositoryRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["repository_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "repositoryId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "repositoryId" in jsonified_request - assert jsonified_request["repositoryId"] == request_init["repository_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["repositoryId"] = 'repository_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_repository._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("repository_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "repositoryId" in jsonified_request - assert jsonified_request["repositoryId"] == 'repository_id_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_repository(request) - - expected_params = [ - ( - "repositoryId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_repository_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_repository._get_unset_required_fields({}) - assert set(unset_fields) == (set(("repositoryId", )) & set(("parent", "repository", "repositoryId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_repository_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_create_repository") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_create_repository") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.CreateRepositoryRequest.pb(repositories.CreateRepositoryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.CreateRepositoryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.CreateRepositoryRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request_init["repository"] = {'name': 'name_value', 'remote_uri': 'remote_uri_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'annotations': {}, 'etag': 'etag_value', 'webhook_id': 'webhook_id_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_repository(request) - - -def test_create_repository_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_repository(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories" % client.transport._host, args[1]) - - -def test_create_repository_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_repository( - repositories.CreateRepositoryRequest(), - parent='parent_value', - repository=repositories.Repository(name='name_value'), - repository_id='repository_id_value', - ) - - -def test_create_repository_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.BatchCreateRepositoriesRequest, - dict, -]) -def test_batch_create_repositories_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.batch_create_repositories(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_batch_create_repositories_rest_required_fields(request_type=repositories.BatchCreateRepositoriesRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.batch_create_repositories(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_batch_create_repositories_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.batch_create_repositories._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "requests", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_create_repositories_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_batch_create_repositories") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_batch_create_repositories") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.BatchCreateRepositoriesRequest.pb(repositories.BatchCreateRepositoriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.BatchCreateRepositoriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.batch_create_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_batch_create_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.BatchCreateRepositoriesRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_create_repositories(request) - - -def test_batch_create_repositories_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.batch_create_repositories(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories:batchCreate" % client.transport._host, args[1]) - - -def test_batch_create_repositories_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_create_repositories( - repositories.BatchCreateRepositoriesRequest(), - parent='parent_value', - requests=[repositories.CreateRepositoryRequest(parent='parent_value')], - ) - - -def test_batch_create_repositories_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.GetRepositoryRequest, - dict, -]) -def test_get_repository_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = repositories.Repository( - name='name_value', - remote_uri='remote_uri_value', - etag='etag_value', - webhook_id='webhook_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_repository(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, repositories.Repository) - assert response.name == 'name_value' - assert response.remote_uri == 'remote_uri_value' - assert response.etag == 'etag_value' - assert response.webhook_id == 'webhook_id_value' - - -def test_get_repository_rest_required_fields(request_type=repositories.GetRepositoryRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = 
RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.Repository() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_repository(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_repository_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_repository._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_repository_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_get_repository") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_get_repository") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.GetRepositoryRequest.pb(repositories.GetRepositoryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.Repository.to_json(repositories.Repository()) - - request = repositories.GetRepositoryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.Repository() - - client.get_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.GetRepositoryRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_repository(request) - - -def test_get_repository_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.Repository() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.Repository.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_repository(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*/repositories/*}" % client.transport._host, args[1]) - - -def test_get_repository_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_repository( - repositories.GetRepositoryRequest(), - name='name_value', - ) - - -def test_get_repository_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.ListRepositoriesRequest, - dict, -]) -def test_list_repositories_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.ListRepositoriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_repositories(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRepositoriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_repositories_rest_required_fields(request_type=repositories.ListRepositoriesRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_repositories._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.ListRepositoriesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_repositories(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_repositories_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_repositories._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_repositories_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_list_repositories") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_list_repositories") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.ListRepositoriesRequest.pb(repositories.ListRepositoriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.ListRepositoriesResponse.to_json(repositories.ListRepositoriesResponse()) - - request = repositories.ListRepositoriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.ListRepositoriesResponse() - - client.list_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.ListRepositoriesRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_repositories(request) - - -def test_list_repositories_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.ListRepositoriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.ListRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_repositories(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*/connections/*}/repositories" % client.transport._host, args[1]) - - -def test_list_repositories_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_repositories( - repositories.ListRepositoriesRequest(), - parent='parent_value', - ) - - -def test_list_repositories_rest_pager(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.ListRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.ListRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(repositories.ListRepositoriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/connections/sample3'} - - pager = client.list_repositories(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Repository) - for i in results) - - pages = list(client.list_repositories(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - repositories.DeleteRepositoryRequest, - dict, -]) -def test_delete_repository_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - 
- # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_repository(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_delete_repository_rest_required_fields(request_type=repositories.DeleteRepositoryRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_repository._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_repository._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("etag", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_repository(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_repository_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_repository._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_repository_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_delete_repository") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_delete_repository") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.DeleteRepositoryRequest.pb(repositories.DeleteRepositoryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, 
- "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = repositories.DeleteRepositoryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_repository(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_repository_rest_bad_request(transport: str = 'rest', request_type=repositories.DeleteRepositoryRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_repository(request) - - -def test_delete_repository_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_repository(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/connections/*/repositories/*}" % client.transport._host, args[1]) - - -def test_delete_repository_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_repository( - repositories.DeleteRepositoryRequest(), - name='name_value', - ) - - -def test_delete_repository_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchReadWriteTokenRequest, - dict, -]) -def test_fetch_read_write_token_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadWriteTokenResponse( - token='token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.fetch_read_write_token(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.FetchReadWriteTokenResponse) - assert response.token == 'token_value' - - -def test_fetch_read_write_token_rest_required_fields(request_type=repositories.FetchReadWriteTokenRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["repository"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_write_token._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["repository"] = 'repository_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_write_token._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "repository" in jsonified_request - assert jsonified_request["repository"] == 'repository_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadWriteTokenResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.fetch_read_write_token(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_read_write_token_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_read_write_token._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("repository", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_read_write_token_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_read_write_token") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_read_write_token") as pre: 
- pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.FetchReadWriteTokenRequest.pb(repositories.FetchReadWriteTokenRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.FetchReadWriteTokenResponse.to_json(repositories.FetchReadWriteTokenResponse()) - - request = repositories.FetchReadWriteTokenRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.FetchReadWriteTokenResponse() - - client.fetch_read_write_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_fetch_read_write_token_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchReadWriteTokenRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_read_write_token(request) - - -def test_fetch_read_write_token_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadWriteTokenResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - repository='repository_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchReadWriteTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.fetch_read_write_token(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadWriteToken" % client.transport._host, args[1]) - - -def test_fetch_read_write_token_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.fetch_read_write_token( - repositories.FetchReadWriteTokenRequest(), - repository='repository_value', - ) - - -def test_fetch_read_write_token_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchReadTokenRequest, - dict, -]) -def test_fetch_read_token_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadTokenResponse( - token='token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.fetch_read_token(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.FetchReadTokenResponse) - assert response.token == 'token_value' - - -def test_fetch_read_token_rest_required_fields(request_type=repositories.FetchReadTokenRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["repository"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_token._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["repository"] = 'repository_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_read_token._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "repository" in jsonified_request - assert jsonified_request["repository"] == 'repository_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadTokenResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.fetch_read_token(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_read_token_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_read_token._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("repository", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_read_token_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_read_token") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_read_token") as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = repositories.FetchReadTokenRequest.pb(repositories.FetchReadTokenRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.FetchReadTokenResponse.to_json(repositories.FetchReadTokenResponse()) - - request = repositories.FetchReadTokenRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.FetchReadTokenResponse() - - client.fetch_read_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_fetch_read_token_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchReadTokenRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_read_token(request) - - -def test_fetch_read_token_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchReadTokenResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - repository='repository_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchReadTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.fetch_read_token(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:accessReadToken" % client.transport._host, args[1]) - - -def test_fetch_read_token_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.fetch_read_token( - repositories.FetchReadTokenRequest(), - repository='repository_value', - ) - - -def test_fetch_read_token_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchLinkableRepositoriesRequest, - dict, -]) -def test_fetch_linkable_repositories_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchLinkableRepositoriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchLinkableRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.fetch_linkable_repositories(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchLinkableRepositoriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_fetch_linkable_repositories_rest_required_fields(request_type=repositories.FetchLinkableRepositoriesRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["connection"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_linkable_repositories._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["connection"] = 'connection_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_linkable_repositories._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "connection" in jsonified_request - assert jsonified_request["connection"] == 'connection_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.FetchLinkableRepositoriesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.FetchLinkableRepositoriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.fetch_linkable_repositories(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_linkable_repositories_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_linkable_repositories._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("connection", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_linkable_repositories_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - 
mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_linkable_repositories") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_linkable_repositories") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = repositories.FetchLinkableRepositoriesRequest.pb(repositories.FetchLinkableRepositoriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.FetchLinkableRepositoriesResponse.to_json(repositories.FetchLinkableRepositoriesResponse()) - - request = repositories.FetchLinkableRepositoriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.FetchLinkableRepositoriesResponse() - - client.fetch_linkable_repositories(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_fetch_linkable_repositories_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchLinkableRepositoriesRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_linkable_repositories(request) - - -def test_fetch_linkable_repositories_rest_pager(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - repositories.Repository(), - ], - next_page_token='abc', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[], - next_page_token='def', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - ], - next_page_token='ghi', - ), - repositories.FetchLinkableRepositoriesResponse( - repositories=[ - repositories.Repository(), - repositories.Repository(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(repositories.FetchLinkableRepositoriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'connection': 'projects/sample1/locations/sample2/connections/sample3'} - - pager = 
client.fetch_linkable_repositories(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, repositories.Repository) - for i in results) - - pages = list(client.fetch_linkable_repositories(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - repositories.FetchGitRefsRequest, - dict, -]) -def test_fetch_git_refs_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = repositories.FetchGitRefsResponse( - ref_names=['ref_names_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.fetch_git_refs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, repositories.FetchGitRefsResponse) - assert response.ref_names == ['ref_names_value'] - - -def test_fetch_git_refs_rest_required_fields(request_type=repositories.FetchGitRefsRequest): - transport_class = transports.RepositoryManagerRestTransport - - request_init = {} - request_init["repository"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_git_refs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["repository"] = 'repository_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_git_refs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("ref_type", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "repository" in jsonified_request - assert jsonified_request["repository"] == 'repository_value' - - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = repositories.FetchGitRefsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.fetch_git_refs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_git_refs_rest_unset_required_fields(): - transport = transports.RepositoryManagerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_git_refs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("refType", )) & set(("repository", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_git_refs_rest_interceptors(null_interceptor): - transport = transports.RepositoryManagerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.RepositoryManagerRestInterceptor(), - ) - client = RepositoryManagerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "post_fetch_git_refs") as post, \ - mock.patch.object(transports.RepositoryManagerRestInterceptor, "pre_fetch_git_refs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
repositories.FetchGitRefsRequest.pb(repositories.FetchGitRefsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = repositories.FetchGitRefsResponse.to_json(repositories.FetchGitRefsResponse()) - - request = repositories.FetchGitRefsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = repositories.FetchGitRefsResponse() - - client.fetch_git_refs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_fetch_git_refs_rest_bad_request(transport: str = 'rest', request_type=repositories.FetchGitRefsRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.fetch_git_refs(request) - - -def test_fetch_git_refs_rest_flattened(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = repositories.FetchGitRefsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'repository': 'projects/sample1/locations/sample2/connections/sample3/repositories/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - repository='repository_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = repositories.FetchGitRefsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.fetch_git_refs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{repository=projects/*/locations/*/connections/*/repositories/*}:fetchGitRefs" % client.transport._host, args[1]) - - -def test_fetch_git_refs_rest_flattened_error(transport: str = 'rest'): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_git_refs( - repositories.FetchGitRefsRequest(), - repository='repository_value', - ) - - -def test_fetch_git_refs_rest_error(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RepositoryManagerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RepositoryManagerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RepositoryManagerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RepositoryManagerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = RepositoryManagerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.RepositoryManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.RepositoryManagerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.RepositoryManagerGrpcTransport, - transports.RepositoryManagerGrpcAsyncIOTransport, - transports.RepositoryManagerRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = RepositoryManagerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.RepositoryManagerGrpcTransport, - ) - -def test_repository_manager_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.RepositoryManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_repository_manager_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.RepositoryManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_connection', - 'get_connection', - 'list_connections', - 'update_connection', - 'delete_connection', - 'create_repository', - 'batch_create_repositories', - 'get_repository', - 'list_repositories', - 'delete_repository', - 'fetch_read_write_token', - 'fetch_read_token', - 'fetch_linkable_repositories', - 'fetch_git_refs', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_repository_manager_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RepositoryManagerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - 
load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_repository_manager_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports.RepositoryManagerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RepositoryManagerTransport() - adc.assert_called_once() - - -def test_repository_manager_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RepositoryManagerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RepositoryManagerGrpcTransport, - transports.RepositoryManagerGrpcAsyncIOTransport, - ], -) -def test_repository_manager_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RepositoryManagerGrpcTransport, - transports.RepositoryManagerGrpcAsyncIOTransport, - transports.RepositoryManagerRestTransport, - ], -) -def test_repository_manager_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.RepositoryManagerGrpcTransport, grpc_helpers), - (transports.RepositoryManagerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_repository_manager_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) -def test_repository_manager_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_repository_manager_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.RepositoryManagerRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_repository_manager_rest_lro_client(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_repository_manager_host_no_port(transport_name): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudbuild.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudbuild.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_repository_manager_host_with_port(transport_name): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudbuild.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudbuild.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_repository_manager_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = RepositoryManagerClient( - credentials=creds1, - transport=transport_name, - ) - client2 = RepositoryManagerClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_connection._session - session2 = client2.transport.create_connection._session - assert session1 != session2 - session1 = client1.transport.get_connection._session - session2 = client2.transport.get_connection._session - assert session1 != session2 - session1 = client1.transport.list_connections._session - session2 = client2.transport.list_connections._session - assert session1 != session2 - session1 = 
client1.transport.update_connection._session - session2 = client2.transport.update_connection._session - assert session1 != session2 - session1 = client1.transport.delete_connection._session - session2 = client2.transport.delete_connection._session - assert session1 != session2 - session1 = client1.transport.create_repository._session - session2 = client2.transport.create_repository._session - assert session1 != session2 - session1 = client1.transport.batch_create_repositories._session - session2 = client2.transport.batch_create_repositories._session - assert session1 != session2 - session1 = client1.transport.get_repository._session - session2 = client2.transport.get_repository._session - assert session1 != session2 - session1 = client1.transport.list_repositories._session - session2 = client2.transport.list_repositories._session - assert session1 != session2 - session1 = client1.transport.delete_repository._session - session2 = client2.transport.delete_repository._session - assert session1 != session2 - session1 = client1.transport.fetch_read_write_token._session - session2 = client2.transport.fetch_read_write_token._session - assert session1 != session2 - session1 = client1.transport.fetch_read_token._session - session2 = client2.transport.fetch_read_token._session - assert session1 != session2 - session1 = client1.transport.fetch_linkable_repositories._session - session2 = client2.transport.fetch_linkable_repositories._session - assert session1 != session2 - session1 = client1.transport.fetch_git_refs._session - session2 = client2.transport.fetch_git_refs._session - assert session1 != session2 -def test_repository_manager_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.RepositoryManagerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_repository_manager_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.RepositoryManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) -def test_repository_manager_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - 
"mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.RepositoryManagerGrpcTransport, transports.RepositoryManagerGrpcAsyncIOTransport]) -def test_repository_manager_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_repository_manager_grpc_lro_client(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_repository_manager_grpc_lro_async_client(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_connection_path(): - project = "squid" - location = "clam" - connection = "whelk" - expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - actual = RepositoryManagerClient.connection_path(project, location, connection) - assert expected == actual - - -def test_parse_connection_path(): - expected = { - "project": "octopus", - "location": "oyster", - "connection": "nudibranch", - } - path = RepositoryManagerClient.connection_path(**expected) - - # Check that the path construction is reversible. 
- actual = RepositoryManagerClient.parse_connection_path(path) - assert expected == actual - -def test_repository_path(): - project = "cuttlefish" - location = "mussel" - connection = "winkle" - repository = "nautilus" - expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format(project=project, location=location, connection=connection, repository=repository, ) - actual = RepositoryManagerClient.repository_path(project, location, connection, repository) - assert expected == actual - - -def test_parse_repository_path(): - expected = { - "project": "scallop", - "location": "abalone", - "connection": "squid", - "repository": "clam", - } - path = RepositoryManagerClient.repository_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_repository_path(path) - assert expected == actual - -def test_secret_version_path(): - project = "whelk" - secret = "octopus" - version = "oyster" - expected = "projects/{project}/secrets/{secret}/versions/{version}".format(project=project, secret=secret, version=version, ) - actual = RepositoryManagerClient.secret_version_path(project, secret, version) - assert expected == actual - - -def test_parse_secret_version_path(): - expected = { - "project": "nudibranch", - "secret": "cuttlefish", - "version": "mussel", - } - path = RepositoryManagerClient.secret_version_path(**expected) - - # Check that the path construction is reversible. 
- actual = RepositoryManagerClient.parse_secret_version_path(path) - assert expected == actual - -def test_service_path(): - project = "winkle" - location = "nautilus" - namespace = "scallop" - service = "abalone" - expected = "projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format(project=project, location=location, namespace=namespace, service=service, ) - actual = RepositoryManagerClient.service_path(project, location, namespace, service) - assert expected == actual - - -def test_parse_service_path(): - expected = { - "project": "squid", - "location": "clam", - "namespace": "whelk", - "service": "octopus", - } - path = RepositoryManagerClient.service_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_service_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = RepositoryManagerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = RepositoryManagerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = RepositoryManagerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = RepositoryManagerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = RepositoryManagerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = RepositoryManagerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = RepositoryManagerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = RepositoryManagerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = RepositoryManagerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = RepositoryManagerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = RepositoryManagerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = RepositoryManagerClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = RepositoryManagerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.RepositoryManagerTransport, '_prep_wrapped_messages') as prep: - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.RepositoryManagerTransport, '_prep_wrapped_messages') as prep: - transport_class = RepositoryManagerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/connections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_cancel_operation(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = RepositoryManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = RepositoryManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (RepositoryManagerClient, transports.RepositoryManagerGrpcTransport), - (RepositoryManagerAsyncClient, transports.RepositoryManagerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index c9173566..f5fb0f8c 100644 --- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -12380,22 +12380,19 @@ def test_parse_build_path(): def test_build_trigger_path(): project = "oyster" - location = "nudibranch" - trigger = "cuttlefish" - expected = "projects/{project}/locations/{location}/triggers/{trigger}".format( + trigger = "nudibranch" + expected = "projects/{project}/triggers/{trigger}".format( project=project, - location=location, trigger=trigger, ) - actual = CloudBuildClient.build_trigger_path(project, location, trigger) + actual = CloudBuildClient.build_trigger_path(project, trigger) assert expected == actual def test_parse_build_trigger_path(): expected = { 
- "project": "mussel", - "location": "winkle", - "trigger": "nautilus", + "project": "cuttlefish", + "trigger": "mussel", } path = CloudBuildClient.build_trigger_path(**expected) @@ -12405,10 +12402,10 @@ def test_parse_build_trigger_path(): def test_crypto_key_path(): - project = "scallop" - location = "abalone" - keyring = "squid" - key = "clam" + project = "winkle" + location = "nautilus" + keyring = "scallop" + key = "abalone" expected = "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}".format( project=project, location=location, @@ -12421,10 +12418,10 @@ def test_crypto_key_path(): def test_parse_crypto_key_path(): expected = { - "project": "whelk", - "location": "octopus", - "keyring": "oyster", - "key": "nudibranch", + "project": "squid", + "location": "clam", + "keyring": "whelk", + "key": "octopus", } path = CloudBuildClient.crypto_key_path(**expected) @@ -12434,8 +12431,8 @@ def test_parse_crypto_key_path(): def test_network_path(): - project = "cuttlefish" - network = "mussel" + project = "oyster" + network = "nudibranch" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -12446,8 +12443,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "winkle", - "network": "nautilus", + "project": "cuttlefish", + "network": "mussel", } path = CloudBuildClient.network_path(**expected) @@ -12457,10 +12454,10 @@ def test_parse_network_path(): def test_repository_path(): - project = "scallop" - location = "abalone" - connection = "squid" - repository = "clam" + project = "winkle" + location = "nautilus" + connection = "scallop" + repository = "abalone" expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format( project=project, location=location, @@ -12473,10 +12470,10 @@ def test_repository_path(): def test_parse_repository_path(): expected = { - "project": "whelk", - "location": "octopus", - 
"connection": "oyster", - "repository": "nudibranch", + "project": "squid", + "location": "clam", + "connection": "whelk", + "repository": "octopus", } path = CloudBuildClient.repository_path(**expected) @@ -12486,9 +12483,9 @@ def test_parse_repository_path(): def test_secret_version_path(): - project = "cuttlefish" - secret = "mussel" - version = "winkle" + project = "oyster" + secret = "nudibranch" + version = "cuttlefish" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -12500,9 +12497,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "nautilus", - "secret": "scallop", - "version": "abalone", + "project": "mussel", + "secret": "winkle", + "version": "nautilus", } path = CloudBuildClient.secret_version_path(**expected) @@ -12512,8 +12509,8 @@ def test_parse_secret_version_path(): def test_service_account_path(): - project = "squid" - service_account = "clam" + project = "scallop" + service_account = "abalone" expected = "projects/{project}/serviceAccounts/{service_account}".format( project=project, service_account=service_account, @@ -12524,8 +12521,8 @@ def test_service_account_path(): def test_parse_service_account_path(): expected = { - "project": "whelk", - "service_account": "octopus", + "project": "squid", + "service_account": "clam", } path = CloudBuildClient.service_account_path(**expected) @@ -12535,8 +12532,8 @@ def test_parse_service_account_path(): def test_subscription_path(): - project = "oyster" - subscription = "nudibranch" + project = "whelk" + subscription = "octopus" expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, @@ -12547,8 +12544,8 @@ def test_subscription_path(): def test_parse_subscription_path(): expected = { - "project": "cuttlefish", - "subscription": "mussel", + "project": "oyster", + "subscription": "nudibranch", } path = 
CloudBuildClient.subscription_path(**expected) @@ -12558,8 +12555,8 @@ def test_parse_subscription_path(): def test_topic_path(): - project = "winkle" - topic = "nautilus" + project = "cuttlefish" + topic = "mussel" expected = "projects/{project}/topics/{topic}".format( project=project, topic=topic, @@ -12570,8 +12567,8 @@ def test_topic_path(): def test_parse_topic_path(): expected = { - "project": "scallop", - "topic": "abalone", + "project": "winkle", + "topic": "nautilus", } path = CloudBuildClient.topic_path(**expected) @@ -12581,9 +12578,9 @@ def test_parse_topic_path(): def test_worker_pool_path(): - project = "squid" - location = "clam" - worker_pool = "whelk" + project = "scallop" + location = "abalone" + worker_pool = "squid" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -12597,9 +12594,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "octopus", - "location": "oyster", - "worker_pool": "nudibranch", + "project": "clam", + "location": "whelk", + "worker_pool": "octopus", } path = CloudBuildClient.worker_pool_path(**expected) @@ -12609,7 +12606,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -12619,7 +12616,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "nudibranch", } path = CloudBuildClient.common_billing_account_path(**expected) @@ -12629,7 +12626,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -12639,7 +12636,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + 
"folder": "mussel", } path = CloudBuildClient.common_folder_path(**expected) @@ -12649,7 +12646,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -12659,7 +12656,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nautilus", } path = CloudBuildClient.common_organization_path(**expected) @@ -12669,7 +12666,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -12679,7 +12676,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "abalone", } path = CloudBuildClient.common_project_path(**expected) @@ -12689,8 +12686,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -12701,8 +12698,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "whelk", + "location": "octopus", } path = CloudBuildClient.common_location_path(**expected)